From 06349428b2cf9e4212507c36a3e69dbe1fddfb72 Mon Sep 17 00:00:00 2001
From: TheRealMoeder
Date: Wed, 30 Jan 2019 20:10:47 +0100
Subject: [PATCH] Remove rst2pdf

---
 Makefile | 10 +-
 dist-packages/rst2pdf/CHANGES.txt | 544 -
 dist-packages/rst2pdf/Contributors.txt | 17 -
 dist-packages/rst2pdf/LICENSE.txt | 21 -
 dist-packages/rst2pdf/README.rst | 71 -
 dist-packages/rst2pdf/README.txt | 39 -
 dist-packages/rst2pdf/bootstrap.py | 113 -
 dist-packages/rst2pdf/buildout.cfg | 52 -
 dist-packages/rst2pdf/rst2pdf/__init__.py | 9 -
 .../rst2pdf/rst2pdf/aafigure_directive.py | 96 -
 .../rst2pdf/rst2pdf/basenodehandler.py | 289 -
 dist-packages/rst2pdf/rst2pdf/config.py | 39 -
 dist-packages/rst2pdf/rst2pdf/counter_role.py | 30 -
 dist-packages/rst2pdf/rst2pdf/createpdf.py | 1687 ---
 dist-packages/rst2pdf/rst2pdf/dumpstyle.py | 161 -
 .../rst2pdf/rst2pdf/extensions/__init__.py | 9 -
 .../rst2pdf/rst2pdf/extensions/dotted_toc.py | 152 -
 .../rst2pdf/rst2pdf/extensions/fancytitles.py | 144 -
 .../rst2pdf/extensions/inkscape_r2p.py | 88 -
 .../rst2pdf/rst2pdf/extensions/plantuml.py | 80 -
 .../rst2pdf/extensions/preprocess_r2p.py | 366 -
 .../rst2pdf/rst2pdf/extensions/sample.py | 20 -
 .../rst2pdf/extensions/vectorpdf_r2p.py | 155 -
 dist-packages/rst2pdf/rst2pdf/findfonts.py | 385 -
 dist-packages/rst2pdf/rst2pdf/flowables.py | 1044 --
 dist-packages/rst2pdf/rst2pdf/genelements.py | 966 --
 dist-packages/rst2pdf/rst2pdf/genpdftext.py | 235 -
 dist-packages/rst2pdf/rst2pdf/image.py | 491 -
 .../rst2pdf/rst2pdf/images/image-missing.jpg | Bin 5101 -> 0 bytes
 .../rst2pdf/rst2pdf/images/image-missing.png | Bin 11228 -> 0 bytes
 dist-packages/rst2pdf/rst2pdf/languages.py | 42 -
 dist-packages/rst2pdf/rst2pdf/log.py | 26 -
 .../rst2pdf/rst2pdf/math_directive.py | 111 -
 .../rst2pdf/rst2pdf/math_flowable.py | 176 -
 dist-packages/rst2pdf/rst2pdf/nodehandlers.py | 17 -
 .../rst2pdf/rst2pdf/oddeven_directive.py | 33 -
 dist-packages/rst2pdf/rst2pdf/opt_imports.py | 126 -
 dist-packages/rst2pdf/rst2pdf/pdfbuilder.py | 915 --
 .../rst2pdf/rst2pdf/pygments2json.py | 56 -
 .../rst2pdf/rst2pdf/pygments2style.py | 73 -
 .../rst2pdf/pygments_code_block_directive.py | 396 -
 dist-packages/rst2pdf/rst2pdf/rson.py | 917 --
 dist-packages/rst2pdf/rst2pdf/sectnumlinks.py | 19 -
 dist-packages/rst2pdf/rst2pdf/sinker.py | 33 -
 dist-packages/rst2pdf/rst2pdf/smartypants.py | 903 --
 dist-packages/rst2pdf/rst2pdf/sphinxnodes.py | 242 -
 dist-packages/rst2pdf/rst2pdf/styles.py | 974 --
 .../rst2pdf/rst2pdf/styles/11x17.style | 1 -
 dist-packages/rst2pdf/rst2pdf/styles/a0.style | 1 -
 .../rst2pdf/rst2pdf/styles/a1-landscape.style | 1 -
 dist-packages/rst2pdf/rst2pdf/styles/a1.style | 1 -
 .../rst2pdf/rst2pdf/styles/a2-landscape.style | 1 -
 dist-packages/rst2pdf/rst2pdf/styles/a2.style | 1 -
 .../rst2pdf/rst2pdf/styles/a3-landscape.style | 1 -
 dist-packages/rst2pdf/rst2pdf/styles/a3.style | 1 -
 .../rst2pdf/rst2pdf/styles/a4-landscape.style | 1 -
 dist-packages/rst2pdf/rst2pdf/styles/a4.style | 1 -
 .../rst2pdf/rst2pdf/styles/a5-landscape.style | 1 -
 dist-packages/rst2pdf/rst2pdf/styles/a5.style | 1 -
 .../rst2pdf/rst2pdf/styles/a6-landscape.style | 1 -
 dist-packages/rst2pdf/rst2pdf/styles/a6.style | 1 -
 .../rst2pdf/rst2pdf/styles/autumn.style | 118 -
 dist-packages/rst2pdf/rst2pdf/styles/b0.style | 1 -
 .../rst2pdf/rst2pdf/styles/b1-landscape.style | 1 -
 dist-packages/rst2pdf/rst2pdf/styles/b1.style | 1 -
 .../rst2pdf/rst2pdf/styles/b2-landscape.style | 1 -
 dist-packages/rst2pdf/rst2pdf/styles/b2.style | 1 -
 .../rst2pdf/rst2pdf/styles/b3-landscape.style | 1 -
dist-packages/rst2pdf/rst2pdf/styles/b3.style | 1 - .../rst2pdf/rst2pdf/styles/b4-landscape.style | 1 - dist-packages/rst2pdf/rst2pdf/styles/b4.style | 1 - .../rst2pdf/rst2pdf/styles/b5-landscape.style | 1 - dist-packages/rst2pdf/rst2pdf/styles/b5.style | 1 - .../rst2pdf/rst2pdf/styles/b6-landscape.style | 1 - dist-packages/rst2pdf/rst2pdf/styles/b6.style | 1 - .../rst2pdf/rst2pdf/styles/borland.style | 146 - dist-packages/rst2pdf/rst2pdf/styles/bw.style | 202 - .../rst2pdf/rst2pdf/styles/colorful.style | 231 - .../rst2pdf/rst2pdf/styles/debugtables.style | 27 - .../rst2pdf/rst2pdf/styles/default.style | 166 - .../rst2pdf/rst2pdf/styles/dejavu.style | 14 - .../rst2pdf/rst2pdf/styles/double-sided.style | 1 - .../rst2pdf/rst2pdf/styles/eightpoint.style | 1 - .../rst2pdf/rst2pdf/styles/emacs.style | 166 - .../rst2pdf/styles/freetype-sans.style | 10 - .../rst2pdf/styles/freetype-serif.style | 10 - .../rst2pdf/rst2pdf/styles/friendly.style | 170 - .../rst2pdf/rst2pdf/styles/fruity.style | 171 - .../rst2pdf/rst2pdf/styles/kerning.style | 16 - .../rst2pdf/rst2pdf/styles/legal.style | 1 - .../rst2pdf/styles/letter-landscape.style | 1 - .../rst2pdf/rst2pdf/styles/letter.style | 1 - .../rst2pdf/rst2pdf/styles/manni.style | 178 - .../rst2pdf/rst2pdf/styles/monokai.style | 86 - .../rst2pdf/rst2pdf/styles/murphy.style | 235 - .../rst2pdf/rst2pdf/styles/native.style | 143 - .../rst2pdf/styles/no-compact-lists.style | 3 - .../rst2pdf/rst2pdf/styles/pastie.style | 231 - .../rst2pdf/rst2pdf/styles/perldoc.style | 142 - .../rst2pdf/rst2pdf/styles/serif.style | 5 - .../rst2pdf/rst2pdf/styles/sphinx.style | 334 - .../rst2pdf/rst2pdf/styles/styles.style | 584 - .../rst2pdf/rst2pdf/styles/tango.style | 198 - .../rst2pdf/rst2pdf/styles/tenpoint.style | 1 - .../rst2pdf/rst2pdf/styles/trac.style | 162 - .../rst2pdf/rst2pdf/styles/twelvepoint.style | 1 - .../rst2pdf/rst2pdf/styles/twocolumn.style | 4 - dist-packages/rst2pdf/rst2pdf/styles/vs.style | 94 - dist-packages/rst2pdf/rst2pdf/svgimage.py | 79 - .../rst2pdf/rst2pdf/templates/cover.tmpl | 15 - .../rst2pdf/templates/sphinxcover.tmpl | 40 - dist-packages/rst2pdf/rst2pdf/tenjin.py | 1078 -- dist-packages/rst2pdf/rst2pdf/uniconvsaver.py | 382 - dist-packages/rst2pdf/rst2pdf/utils.py | 417 - dist-packages/rst2pdf/rst2pdf/writer.py | 33 - dist-packages/rst2pdf/setup.cfg | 3 - dist-packages/rst2pdf/setup.py | 113 - dist-packages/wordaxe/.gitignore | 1 - dist-packages/wordaxe/LICENSE | 1 - dist-packages/wordaxe/README.txt | 1 - dist-packages/wordaxe/setup.py | 65 - .../wordaxe/wordaxe/BaseHyphenator.py | 336 - .../wordaxe/wordaxe/DCWHyphenator.py | 756 - .../wordaxe/wordaxe/ExplicitHyphenator.py | 199 - .../wordaxe/wordaxe/PyHnjHyphenator.py | 118 - dist-packages/wordaxe/wordaxe/__init__.py | 21 - dist-packages/wordaxe/wordaxe/dict/DEhyph.py | 2068 --- .../wordaxe/dict/README_hyph_de_DE.txt | 30 - .../wordaxe/dict/README_hyph_en_GB.txt | 18 - .../wordaxe/dict/README_hyph_en_US.txt | 18 - .../wordaxe/wordaxe/dict/__init__.py | 10 - .../wordaxe/wordaxe/dict/hyph_da.dic | 1146 -- .../wordaxe/wordaxe/dict/hyph_de.dic | 5799 -------- .../wordaxe/wordaxe/dict/hyph_de_DE.dic | 7500 ---------- .../wordaxe/wordaxe/dict/hyph_en.dic | 11388 ---------------- .../wordaxe/wordaxe/dict/hyph_en_GB.dic | 11388 ---------------- .../wordaxe/wordaxe/dict/hyph_en_US.dic | 11388 ---------------- .../wordaxe/wordaxe/dict/hyph_ru.dic | 3875 ------ dist-packages/wordaxe/wordaxe/hnj.py | 160 - dist-packages/wordaxe/wordaxe/hyphen.py | 357 - dist-packages/wordaxe/wordaxe/hyphrules.py | 464 - 
.../wordaxe/plugins/PyHyphenHyphenator.py | 114 - .../wordaxe/wordaxe/plugins/__init__.py | 17 - .../wordaxe/wordaxe/rl/NewParagraph.py | 1358 -- dist-packages/wordaxe/wordaxe/rl/__init__.py | 12 - .../wordaxe/wordaxe/rl/graphdocpy.py | 984 -- .../wordaxe/wordaxe/rl/kerning_info.py | 132 - .../wordaxe/wordaxe/rl/para_fragments.py | 360 - dist-packages/wordaxe/wordaxe/rl/paragraph.py | 11 - .../wordaxe/wordaxe/rl/paraparser.py | 31 - dist-packages/wordaxe/wordaxe/rl/rl_codecs.py | 1034 -- dist-packages/wordaxe/wordaxe/rl/styles.py | 118 - .../wordaxe/wordaxe/rl/xpreformatted.py | 101 - 153 files changed, 5 insertions(+), 79786 deletions(-) delete mode 100644 dist-packages/rst2pdf/CHANGES.txt delete mode 100644 dist-packages/rst2pdf/Contributors.txt delete mode 100644 dist-packages/rst2pdf/LICENSE.txt delete mode 100644 dist-packages/rst2pdf/README.rst delete mode 100644 dist-packages/rst2pdf/README.txt delete mode 100644 dist-packages/rst2pdf/bootstrap.py delete mode 100755 dist-packages/rst2pdf/buildout.cfg delete mode 100644 dist-packages/rst2pdf/rst2pdf/__init__.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/aafigure_directive.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/basenodehandler.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/config.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/counter_role.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/createpdf.py delete mode 100755 dist-packages/rst2pdf/rst2pdf/dumpstyle.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/extensions/__init__.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/extensions/dotted_toc.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/extensions/fancytitles.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/extensions/inkscape_r2p.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/extensions/plantuml.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/extensions/preprocess_r2p.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/extensions/sample.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/extensions/vectorpdf_r2p.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/findfonts.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/flowables.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/genelements.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/genpdftext.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/image.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/images/image-missing.jpg delete mode 100644 dist-packages/rst2pdf/rst2pdf/images/image-missing.png delete mode 100644 dist-packages/rst2pdf/rst2pdf/languages.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/log.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/math_directive.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/math_flowable.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/nodehandlers.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/oddeven_directive.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/opt_imports.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/pdfbuilder.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/pygments2json.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/pygments2style.py delete mode 100755 dist-packages/rst2pdf/rst2pdf/pygments_code_block_directive.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/rson.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/sectnumlinks.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/sinker.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/smartypants.py delete mode 100644 
dist-packages/rst2pdf/rst2pdf/sphinxnodes.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/11x17.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a0.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a1-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a1.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a2-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a2.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a3-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a3.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a4-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a4.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a5-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a5.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a6-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/a6.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/autumn.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b0.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b1-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b1.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b2-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b2.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b3-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b3.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b4-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b4.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b5-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b5.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b6-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/b6.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/borland.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/bw.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/colorful.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/debugtables.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/default.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/dejavu.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/double-sided.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/eightpoint.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/emacs.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/freetype-sans.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/freetype-serif.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/friendly.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/fruity.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/kerning.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/legal.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/letter-landscape.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/letter.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/manni.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/monokai.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/murphy.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/native.style delete mode 
100644 dist-packages/rst2pdf/rst2pdf/styles/no-compact-lists.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/pastie.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/perldoc.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/serif.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/sphinx.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/styles.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/tango.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/tenpoint.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/trac.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/twelvepoint.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/twocolumn.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/styles/vs.style delete mode 100644 dist-packages/rst2pdf/rst2pdf/svgimage.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/templates/cover.tmpl delete mode 100644 dist-packages/rst2pdf/rst2pdf/templates/sphinxcover.tmpl delete mode 100644 dist-packages/rst2pdf/rst2pdf/tenjin.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/uniconvsaver.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/utils.py delete mode 100644 dist-packages/rst2pdf/rst2pdf/writer.py delete mode 100644 dist-packages/rst2pdf/setup.cfg delete mode 100644 dist-packages/rst2pdf/setup.py delete mode 100644 dist-packages/wordaxe/.gitignore delete mode 120000 dist-packages/wordaxe/LICENSE delete mode 120000 dist-packages/wordaxe/README.txt delete mode 100755 dist-packages/wordaxe/setup.py delete mode 100755 dist-packages/wordaxe/wordaxe/BaseHyphenator.py delete mode 100755 dist-packages/wordaxe/wordaxe/DCWHyphenator.py delete mode 100755 dist-packages/wordaxe/wordaxe/ExplicitHyphenator.py delete mode 100755 dist-packages/wordaxe/wordaxe/PyHnjHyphenator.py delete mode 100755 dist-packages/wordaxe/wordaxe/__init__.py delete mode 100755 dist-packages/wordaxe/wordaxe/dict/DEhyph.py delete mode 100755 dist-packages/wordaxe/wordaxe/dict/README_hyph_de_DE.txt delete mode 100755 dist-packages/wordaxe/wordaxe/dict/README_hyph_en_GB.txt delete mode 100755 dist-packages/wordaxe/wordaxe/dict/README_hyph_en_US.txt delete mode 100755 dist-packages/wordaxe/wordaxe/dict/__init__.py delete mode 100755 dist-packages/wordaxe/wordaxe/dict/hyph_da.dic delete mode 100755 dist-packages/wordaxe/wordaxe/dict/hyph_de.dic delete mode 100755 dist-packages/wordaxe/wordaxe/dict/hyph_de_DE.dic delete mode 100755 dist-packages/wordaxe/wordaxe/dict/hyph_en.dic delete mode 100755 dist-packages/wordaxe/wordaxe/dict/hyph_en_GB.dic delete mode 100755 dist-packages/wordaxe/wordaxe/dict/hyph_en_US.dic delete mode 100755 dist-packages/wordaxe/wordaxe/dict/hyph_ru.dic delete mode 100755 dist-packages/wordaxe/wordaxe/hnj.py delete mode 100755 dist-packages/wordaxe/wordaxe/hyphen.py delete mode 100755 dist-packages/wordaxe/wordaxe/hyphrules.py delete mode 100755 dist-packages/wordaxe/wordaxe/plugins/PyHyphenHyphenator.py delete mode 100755 dist-packages/wordaxe/wordaxe/plugins/__init__.py delete mode 100755 dist-packages/wordaxe/wordaxe/rl/NewParagraph.py delete mode 100755 dist-packages/wordaxe/wordaxe/rl/__init__.py delete mode 100755 dist-packages/wordaxe/wordaxe/rl/graphdocpy.py delete mode 100755 dist-packages/wordaxe/wordaxe/rl/kerning_info.py delete mode 100755 dist-packages/wordaxe/wordaxe/rl/para_fragments.py delete mode 100755 dist-packages/wordaxe/wordaxe/rl/paragraph.py delete mode 100755 dist-packages/wordaxe/wordaxe/rl/paraparser.py delete mode 100755 
dist-packages/wordaxe/wordaxe/rl/rl_codecs.py
 delete mode 100755 dist-packages/wordaxe/wordaxe/rl/styles.py
 delete mode 100755 dist-packages/wordaxe/wordaxe/rl/xpreformatted.py

diff --git a/Makefile b/Makefile
index 8f72c174f..78b7e3b4e 100644
--- a/Makefile
+++ b/Makefile
@@ -36,7 +36,7 @@ I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) -t devo8 -t devo10 source
 help:
 	@echo "Please use \`make <target>' where <target> is one of"
 	@echo "  html       to make standalone HTML files"
-	@echo "  pdf        to make standalone PDF files via rst2pdf"
+#	@echo "  pdf        to make standalone PDF files via rst2pdf"
 	@echo "  epub       to make an epub"
 	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
 	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
@@ -116,10 +116,10 @@ pseudoxml: $(SPHINXBUILD)
 	@echo
 	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml-$(TARGET)."
-pdf: $(SPHINXBUILD)
-	$(SPHINXBUILD) -b pdf $(ALLSPHINXOPTS) $(BUILDDIR)/pdf-$(TARGET)
-	@echo
-	@echo "Build finished. The PDF files are in $(BUILDDIR)/pdf-$(TARGET)."
+#pdf: $(SPHINXBUILD)
+#	$(SPHINXBUILD) -b pdf $(ALLSPHINXOPTS) $(BUILDDIR)/pdf-$(TARGET)
+#	@echo
+#	@echo "Build finished. The PDF files are in $(BUILDDIR)/pdf-$(TARGET)."
 venv: $(VENVDIR)/bin/activate
 $(BUILDDIR)/venv/bin/activate: requirements.txt

diff --git a/dist-packages/rst2pdf/CHANGES.txt b/dist-packages/rst2pdf/CHANGES.txt
deleted file mode 100644
index 54d800d36..000000000
--- a/dist-packages/rst2pdf/CHANGES.txt
+++ /dev/null
@@ -1,544 +0,0 @@
-New in 0.93
------------
-
-* Fixed Issue 447: Double-sided always starts on the right (By Rob Ludwick)
-
-  * Removed --first-page-even as it was not used anywhere.
-  * Added --first-page-on-right
-
-* Fixed Issue 464: support alignment via :class: in image directives.
-* Fixed Issue 482: Line blocks with indented parts get extraneous spacing
-* Fixed Issue 470: Support for :target: in figures.
-* New style "image" to be applied to image directives.
-* Fixed Issue 485: Better styling support for figures/images (spaceBefore/After)
-* Support rst2pdf [inf [outf]] syntax to be more compatible with rst2*
-* Implemented Issue 389: New --strip-element-with-class option
-* Fixed Issue 474: CellStyle1 is not there in reportlab 2.6
-* Removed default padding from DelayedTable, which looked bad
-  on headers/footers.
-* Improvements to the math directive (font color and size)
-* Better support for styling literals.
-* Fixed Issue 454 (Splitting failure)
-* Regressed Issue 374 (some literal blocks get oversplit)
-* Switched from svglib to svg2rlg
-* Removed uniconvertor support
-* Fixed Issue 477: Sink footnote separator (patch by asermax)
-* Fixed Issue 473: Support "code" directive like an alias of code-block.
-* Fixed Issue 472: Implemented MyImage._unRestrictSize
-* Fixed Issue 471: Respect class in lineblocks.
-* Fixed Issue 455: New pisa/xhtml2pdf has very different imports
-* Reopened Issue 289: Broken bullet customization.
-* Reopened Issue 310: Line numbers in code blocks are wrong
-* Reopened Issue 337: Bad layout with inline images in tables
-* Marked Issue 358 as fixed.
-* Fixed Issue 410: always include full lines in code-blocks (mmueller patch)
-* Regression in fancytitles extension: Issue 486
-
-New in 0.92
------------
-
-* Fixed Issue 394; missing _restrictSize method with RL 2.5
-* Fixed Issue 452: applying missing classes to lists crashed rst2pdf
-* Fixed Issue 427: multiple spaces collapsed on inline literals.
-* Fixed Issue 451: roman.py was moved in docutils 0.9 -* Fixed Issue 446: made it work again with python 2.4 - -New in 0.91 ------------ - -* Fixed Issue 438: sphinx support was completely broken in 0.90 - -New in 0.90 ------------ - -* Added raw HTML support, by Dimitri Christodoulou -* Fixed Issue 422: Having no .afm files made font lookup slow. -* Fixed Issue 411: Sometimes the windows registry has the font's abspath. -* Fixed Issue 430: Using --config option caused other options to - be ignored (by charles at cstanhope dot com) -* Fixed Issue 436: Add pdf_style_path to sphinx (by tyler@datastax.com) -* Fixed Issue 428: page numbers logged as errors -* Added support for many pygments options in code-block (by Joaquin Sorianello) -* Implemented Issue 404: plantuml support -* Issue 399: support sphinx's template path option -* Fixed Issue 406: calls to the wrong logging function -* Implemented Issue 391: New --section-header-depth option. -* Fixed Issue 390: the --config option was ignored. -* Added support for many pygments options in code-block (by Joaquin Sorianello) -* Fixed Issue 379: Wrong style applied to paragraphs in definitions. -* Fixed Issue 378: Multiline :address: were shown collapsed. -* Implemented Issue 11: FrameBreak (and conditional FrameBreak) -* The description of frames in page templates was just wrong. -* Fixed Issue 374: in some cases, literal blocks were split inside - a page, or the pagebreak came too early. -* Fixed Issue 370: warning about sphinx.addnodes.highlightlang not being - handled removed. -* Fixed Issue 369: crash in hyphenator when specifying "en" as a language. -* Compatibility fix to Sphinx 0.6.x (For python 2.7 docs) - -New in 0.16 ------------ - -* Fixed Issue 343: Plugged memory leak in the RSON parser. -* Fix for Issue 287: there is still a corner case if you have two sections - with the same title, at the same level, in the same page, in different files - where the links will break. -* Fixed Issue 367: german-localized dates are MM. DD. YYYY so when used in sphinx's - template cover they appeared weird, like a list item. Fixed with a minor workaround in - the template. -* Fixed Issue 366: links to "#" make no sense on a PDF file -* Made definitions from definition lists more stylable. -* Moved definition lists to SplitTables, so you can have very long - definitions. -* Fixed Issue 318: Implemented Domain specific indexes for Sphinx 1.0.x -* Fixed Index links when using Sphinx/pdfbuilder. -* Fixed Issue 360: Set literal.wordWrap to None by default so it doesn't inherit - wordWrap CJK when you use the otherwise correct japanese settings. In any case, - literal blocks are not supposed to wrap at all. -* Switched pdfbuilder to use SplitTables by default (it made no sense not to do it) -* Fixed Issue 365: some TTF fonts don't validate but they work anyway. -* Set a valid default baseurl for Sphinx (makes it much faster!) -* New feature: --use-numbered-links to show section numbers in links to sections, like "See section 2.3 Termination" -* Added stylesheets for landscape paper sizes (i.e: a4-landscape.style) -* Fixed Issue 364: Some options not respected when passed in per-doc options - in sphinx. -* Fixed Issue 361: multiple linebreaks in line blocks were collapsed. -* Fixed Issue 363: strange characters in some cases in math directive. 
-* Fixed Issue 362: Smarter auto-enclosing of equations in $...$ -* Fixed Issue 358: --real--footnotes defaults to False, but help text indicates default is True -* Fixed Issue 359: Wrong --fit-background-mode help string -* Fixed Issue 356: missing cells if a cell spawns rows and columns. -* Fixed Issue 349: Work correctly with languages that are available in form aa_bb and not aa (example: zh_cn) -* Fixed Issue 345: give file/line info when there is an error in a raw PDF directive. -* Fixed Issue 336: JPEG images should work even without PIL (but give a warning because - sizes will probably be wrong) -* Fixed Issue 351: footnote/citation references were generated incorrectly, which - caused problems if there was a citation with the same text as a heading. -* Fixed Issue 353: better handling of graphviz, so that it works without vectorpdf - but gives a warning about it. -* Fixed Issue 354: make todo_node from sphinx customizable. -* Fixed bug where nested lists broke page layout if the page was small. -* Smarter --inline-links option -* New extension: fancytitles, see http://lateral.netmanagers.com.ar/weblog/posts/BB906.html -* New feature: tab-width option in code-block directive (defaults to 8). -* Fixed Issue 340: endnotes/footnotes were not styled. -* Fixed Issue 339: class names using _ were not usable. -* Fixed Issue 335: ugly crash when using images in some - specific places (looks like a reportlab bug) -* Fixed Issue 329: make the figure alignment/class attributes - work more like LaTeX than HTML. -* Fixed Issue 328: list item styles were being ignored. -* Fixed Issue 186: new --use-floating-images makes images with - :align: set work like in HTML, with the next flowable flowing - beside it. -* Fixed Issue 307: header/footer from stylesheet now supports inline - rest markup and substitutions defined in the main document. -* New pdf_toc_depth option for Sphinx/pdfbuilder -* New pdf_use_toc option for Sphinx/pdfbuilder -* Fixed Issue 308: compatibility with reportlab from SVN -* Fixed Issue 323: errors in the config.sample made it work weird. -* Fixed Issue 322: Image substitutions didn't work in document title. -* Implemented Issue 321: underline and strikethrough available - in stylesheet. -* Fixed Issue 317: Ugly error message when file does not exist - -New in 0.15 ------------ - -* Fixed Issue 315: crash when using an undefined class for - a list. -* Implemented Issue 279: images can be specified as URLs. -* Fixed Issue 313: new --fit-background-mode option. -* Fixed Issue 110: new --real-footnotes option (buggy). -* Fixed Issue 176: spacers larger than a page don't crash. -* Fixed Issue 65: References to Helvetica/Times when it was not used. -* Fixed Issue 310: added option linenos_offset to code blocks. -* Fixed Issue 309: style for blockquotes was not respected. -* Custom cover page support (related to Issue 157) -* Fixed Issue 305: support wildcards in image names - and then use the best one available. -* Implemented Issue 298: counters -* Improved widow/orphan support for literal blocks -* Fixed Issue 304: Code blocks didn't respect fontSize in class. - -New in 0.14.2 -------------- - -* Regained compatibility with reportlab 2.3 -* Fixed regression in Issue 152: right-edege of boxes not aligned inside - list items. 
- -* Fixed Issue 301: accept padding parameters in bullet/item lists - -New in 0.14.1 -------------- - -* Make it compatible with Sphinx 0.6.3 again -* Fixed Issue 300: image-missing.jpg was not installed - -New in 0.14 ------------ - -* Fixed Issue 197: Table borders were confusing. -* Fixed Issue 297: styles from default.json leaked onto other syntax - highlighting stylesheets. -* Fixed Issue 295: keyword replacement in headers/footers didn't work - if ###Page### and others was inside a table. -* New feature: oddeven directive to display alternative content on - odd/even pages (good for headers/footers!) -* Switched all stylesheets to more readable RSON format. -* Fixed Issue 294: Images were deformed when only height was specified. -* Fixed Issue 293: Accept left/center/right as alignments in stylesheets. -* Fixed Issue 292: separate style for line numbers in codeblocks -* Fixed Issue 291: support class directive for codeblocks -* Fixed Issue 104: total number of pages in header/footer works in - all cases now. -* Fixed Issue 168: linenos and linenothreshold options in Sphinx now - work correctly. -* Fixed regression in 0.12 (interaction between rst2pdf and sphinx math) -* Documented extensions in the manual -* Better styling of bullets/items (Issue 289) -* Fixed Issue 290: don't fail on broken images -* Better font finding in windows (patch by techtonik, Issue 282). -* Fixed Issue 166: Implemented Sphinx's hlist (horizontal lists) -* Fixed Issue 284: Implemented production lists for sphinx -* Fixed Issue 165: Definition lists not properly indented inside - admonitions or tables. -* SVG Images work inline when using the inkscape extension. -* Fixed Issue 268: TOCs shifted to the left on RL 2.4 -* Fixed Issue 281: sphinx test automation was broken -* Fixed Issue 280: wrong page templates used in sphinx - -New in 0.13 ------------ - -* New TOC code (supports dots between title and page number) -* New extension framework -* New preprocessor extension -* New vectorpdf extension -* Support for nested stylesheets -* New headerSeparator/footerSeparator stylesheet options -* Foreground image support (useful for watermarks) -* Support transparency (alpha channel) when specifying colors -* Inkscape extension for much better SVG support -* Ability to show total page count in header/footer -* New RSON format for stylesheets (JSON superset) -* Fixed Issue 267: Support :align: in figures -* Fixed Issue 174 regression (Indented lines in line blocks) -* Fixed Issue 276: Load stylesheets from strings -* Fixed Issue 275: Extra space before lineblocks -* Fixed Issue 262: Full support for Reportlab 2.4 -* Fixed Issue 264: Splitting error in some documents -* Fixed Issue 261: Assert error with wordaxe -* Fixed Issue 251: added support for rst2pdf extensions when using sphinx -* Fixed Issue 256: ugly crash when using SVG images without SVG support -* Fixed Issue 257: support aafigure when using sphinx/pdfbuilder -* Initial support for graphviz extension in pdfbuilder -* Fixed Issue 249: Images distorted when specifiying width and height -* Fixed Issue 252: math directive conflicted with sphinx -* Fixed Issue 224: Tables can be left/center/right aligned in the page. -* Fixed Issue 243: Wrong spacing for second paragraphs in bullet lists. -* Big refactoring of the code. -* Support for Python 2.4 -* Fully reworked test suite, continuous integration site. 
-* Optionally use SWFtools for PDF images -* Fixed Issue 231 (Smarter TTF autoembed) -* Fixed Issue 232 (HTML tags in title metadata) -* Fixed Issue 247 (printing stylesheet) - -New in 0.12.3 -------------- - -* Fixed Issue 230 (Admonition titles were not translated) -* Fixed Issue 228 (page labels and numbers match, so page ii is the - same on-page and in the PDF TOC) -* Fixed Issue 227 (missing background should not be fatal error) -* Fixed Issue 225 (bad spacing in lineblocks) -* Fixed Issue 223 (non-monospaced styles used in code) - -New in 0.12.2 -------------- - -* Fix Issue 219 (incompatibility with reportlab 2.1) -* Added pdf_default_dpi option for pdfbuilder -* More style docs in the manual -* Better styling of lists -* Fix bug reported in comments in my blog where a stylesheet with - showHeader=True and no explicit header caused an exception. -* Fixed Issue 215: crashes in bookrest's background renderer. - -New in 0.12.1 -------------- - -* Ship local patched copy of pypoppler-qt4 -* Partial fix for Issue 205: KeyError: 'format' -* Fixed Issue 212: XML parsing error in bookrest -* Fixed Issue 210: pickle error in bookrest -* Switched --enable-splittables to True by default -* Fixed Issue 204: syntax error on font importing code - -New in 0.12 ------------ - -* Fixed Issue 202: broken processing of HTML raw nodes -* New "options" section in stylesheets. New ["options"]["stylesheets"] subsection, - which works similar to -s or to an include file: a list of stylesheets to be - processed before the current one. -* New --config option -* Fix for Issue 200 (position of frames was miscalculated) -* Fix For Issue 188 (uniconvertor "'unicode' object has no attribute 'readline'" error) -* New raw directive command: SetPageCounter. This enables - page counter manipulation, and use of different styles, - roman, lowerroman, alpha, loweralpha and arabic. -* New raw directive commands: EvenPageBreak and OddPageBreak -* New option to make sections break to odd or even pages: - --break-side=VALUE -* New option to add an empty page at the beginning of the - document: --blank-first-page. -* Fixed bug in authors field width calculation -* Support % in bullet and field lists column widths -* Use bullet_list or item_list styles for bullet and item lists respectively. -* Support % in field list column width description. -* Fix for Issue 184 (font metrics go crazy with TT font) -* New admonition code based on SplitTable (beta quality) -* Fix for Issue 180 (support for very very long list items. 
Needs testing) -* Fix for Issue 175 (widow/orphan titles) -* Fix for Issue 174 (line blocks didn't respect indentation) -* Worked around Issue 173 (quotes didn't indent inside table cells) -* Respect spaceBefore and spaceAfter for footnotes/endnotes -* Added tests for (almost) all of sphinx's custom markup -* Fixed Issue 170 (Wrong font embedding) -* Fixed Issue 171 (Damaged xref table) -* Fixed Issue 159 (Admonition and table widths were miscalculated) -* Fixed Issue 162 (wrong highlighting using sphinx) -* Changed default language policy as described in Issue 53 -* Fixed Issue 148 (Images should be looked for relative to source document) -* Fixed Issue 158 (Some admonitions crashed pdfbuilder) -* Fixed Issue 154 (incompatibility with RL 2.1) -* Fixed Issue 155 (crash when sidebars split in a certain way) -* Fixed issue 152 (padding and alignment of table styles, like - when using literal blocks inside lists) -* Integrated pdfbuilder sphinx extension (more work needed) -* Kerning support for true type fonts (thanks to wordaxe!), added - to the docs, added convenience stylesheet. -* Fixed Issue 151 and behaviour on Issue 116, about images too large - for available space / the full frame height. -* Fixed problem in admonition titles. -* Fixed section names in headers/footers: FIRST section on the page - is used, not LAST. -* Fixed Issue 145: padding of literal blocks was broken. -* Fixed bug: paragraphs with ids should have the matching anchors -* Fixed bug: internal references were not linked correctly -* Fixed Issue 144: PDF TOC had wrong page numbers in some cases -* More sphinx compatibility -* New table styles code, also make class directive work for tables -* Fixed Issue 140: html-like markup in titles was kept in the PDF TOC -* Fixed Issue 138: Redid figure styling. Also fixed bugs in BoxedContainer -* Fixed Issue 137: bugs in escaping characters in interpreted roles -* Make it work (in a slightly degraded mode) without PIL, as - long as you are only using JPGs or have PythonMagick installed. - This is good for OS X, where "installing PIL is a PITA" -* Fixed issue 134: entities were replaced in interpreted roles - (not needed) -* Support for aafigure (http://launchpad.net/aafigure) -* Spacers support units -* TOC styles now configurable in stylesheet - -New in 0.11 ------------ - -* Degrade more gracefully when one or more wordaxe hyphenators are - broken (currently DWC is the broken one) -* Fixed issue 132: in some cases, with user-defined fontAlias, bold and - italic would get confused (getting italic instead of bold in inline - markup, for instance). -* New stylesheet no-compact-lists to make lists... less compact -* SVG images now handle % as a width unit correctly. -* Implemented issue 127: support images in PDF format. Right now they - are rasterized, so it's not ideal. Perhaps something better will come up - later. -* Fixed issue 129: make it work around a prblem with KeepTogether in RL 2.1 - it probably makes the output look worse in some cases when using that. - RL 2.1 is not really supported, so added a warning. -* Fixed issue 130: use os.pathsep instead of ":" since ":" in windows is used - in disk names (and we still pay for DOS idiocy, in 2009) -* Fixed issue 128: headings level 3+ all looked the same -* Ugly bugfix for Issue 126: crashes when using images in header + TOC -* New tstyles section in the stylesheet provides more configurable list layouts - and more powerful table styling. 
-* Better syntax highlighting (supports bold/italic) -* Workaround for issue 103 so you can use borderPadding as a list (but it will look wrong - if you are using wordaxe <= 0.3.2) -* Added fieldvalue style for field lists -* Added optionlist tstyle, for option lists -* Added collection of utility stylesheets and documented it -* Improved command line parsing and stylesheet loading (guess - extension like latest rst2latex does) -* Fixed Issue 67: completely new list layouting code -* Fixed Issue 116: crashes caused by huge images -* Better support for %width in images, n2ow it's % of the container frame's - width, not of the text area. -* Fixed bug in SVG scaling -* Better handling of missing images -* Added missing styles abstract, contents, dedication to the default stylesheet -* Tables style support spaceBefore and spaceAfter -* New topic-title style for topic titles (obvious ;-) -* Vertical alignment for inline images (:align: parameter) -* Issue 118: Support for :scale: in images and handle resizing of inline images -* Issue 119: Fix placement of headers and footers -* New background property for page templates (nice for presentations, for example) -* Default to px for image width specifications instead of pt -* Support all required measurement units ("em" "ex" "px" "in" "cm" - "mm" "pt" "pc" "%" "") -* New automated scripts to check test cases for "visual differences" -* Respect images DPI property a bit like rst2latex does. -* Issue 110: New --inline-footnotes option -* Tested with reportlab from SVN trunk -* Support for Dinu Gherman's svglib. If both svglib and uniconvertor are available, - svglib is preferred (for SVG, of course). Patch originally by rute. -* Issue 109: Separate styles for each kind of admonition -* For Issue 109: missing styles are not a fatal error -* Issue 117: TOCs with more than 6 levels now supported (raised limit to 9, which - is silly deep) - -New in 0.10.1 -------------- - -* Issue 114: Fixed bug in PDF TOC for sections containing ampersands - -New in 0.10 ------------ - -* Issue 87: Table headers can be repeated in each page (thanks to Yasushi Masuda) -* Issue 93: Line number support for code blocks (:linenos: true) -* Issue 111: Added --no-footnote-backlinks option -* Issue 107: Support localized directives/roles (example: sommaire instead of contents) -* Issue 112: Fixed crash when processing empty list items -* Issue 98: Nobreak support, and set as default for inline-literals so they don't hyphenate. -* Slightly better tests -* Background colors in text styles work with reportlab 2.3 -* Issue 99: Fixed hyphenation in headers/footers (requires wordaxe 0.3.2) -* Issue 106: Crash on demo.txt fixed (requires wordxe 0.3.2) -* Issue 102: Implemented styles for bulleted and numbered lists -* Issue 38: Default headers/footers via options, config file or stylesheet -* Issue 88: Implemented much better book-style TOCs -* Issue 100: Fixed bug with headers/footers and Reportlab 2.3 -* Issue 95: Fixed bug with indented tables -* Issue 89: Implemented --version -* Issue 84: Fixed bug with relative include paths -* Issue 85: Fixed bug with table cell styles -* Issue 83: Fixed bug with numeric colors in backColor attribute -* Issue 44: Support for stdin and stdout -* Issue 79: Added --stylesheet-path option -* Issue 80: Send warnings to stderr, not stdout -* Issue 66: Implemented "smart quotes" -* Issue 77: Work around missing matplotlib -* Proper translation of labels (such as "Author", "Version" etc.) using the - docutils languages package. 
(r473) -* Fixed problems with wrong or non-existing fonts. (r484) -* Page transition effect support for presentations (r423) - - -New in 0.9 ----------- - -* Math support via Mathplotlib -* Huge bug in header/footer page numbers/section names fixed -* Several bugs in nested lists fixed (not 100% correct yet, but better) -* Lists that don't start at 1 work now -* Nicer definition lists - -New in 0.8.1 ------------- - -* Support for more complex headers and footers - (including image directives and tables) -* Optional inline links -* Wordaxe 0.2.6 support -* Several bugs fixed (issues 48,68,41,60,58,64,67) -* Support for system-wide config file -* Better author metadata - -New in 0.8 ----------- - -* Support for vector graphics: SVG, EPS, PS, CDR and others (requires uniconvertor) -* Support for stdin and stdout, so you can use rst2pdf in pipes. -* Works with reportlab 2.1 and 2.2 -* Simpler stylesheets (guess bulletFontName, leading, bulletFontSize from other parameters) -* Some support for sphinx -* Fixed the docutils Writer interface -* Continue processing when an image is missing -* Support for user config file -* Font sizes can be expressed in units or % of parent style's size -* Larger font size in the default stylesheet - -New in 0.7 ----------- - -* Automatic Type1 and True Type font embedding. Just use the font or family name, and (with a little luck), it will be embedded for you. -* width attribute in styles, to create narrow paragraphs/tables -* Styles for table headers and table cells -* "Zebra tables" -* Improvements in the handling of overflowing literal blocks (code, for instance) -* Different modes to handle too-large literal blocks: overflow/truncate/shrink. -* Real sidebars and "floating" elements. -* Fixed link style (no ugly black underlining!) - -New in 0.6 ----------- - -* Stylesheet-defined page layout (For example, multicolumn) and layout switching -* Cascading Stylesheets (change exactly what you need changed) -* PDF table of contents -* Current section names and numbers in headers/footers -* Support for compressed PDF files -* Link color is configurable -* Fixed bugs in color handling -* Multilingual hyphenation -* Auto-guessing image size, support for sizes in % -* Gutter margins -* Big refactoring -* More tolerant of minor problems -* Limited _raw_ directive (you can insert pagebreaks and vertical space) -* Implemented a "traditional" docutils writer -* Offer a reasonable API for use as a library -* Fixed copyright/licensing -* code-block now supports including files (whole or in part) so you can highlight external code. - - -New in 0.5 ----------- - -* Support for :widths: in tables -* Support for captions in tables -* Support for multi-row headers in tables -* Improved definition lists -* Fixed bug in image directive -* Whitespace conforming to PEP8 -* Fixed bug in text size on code-block -* Package is more setuptools compliant -* Fix for option groups in option lists -* Citations support -* Title reference role fix - -New in 0.4 ----------- - -* Fixed bullet and item lists indentation/nesting. 
-* Implemented citations -* Working links between footnotes and its references -* Justification enabled by default -* Fixed table bug (demo.txt works now) -* Title and author support in PDF properties -* Support for document title in header/footer -* Custom page sizes and margins - -New in 0.3 ----------- - -* Font embedding (use any True Type font in your PDFs) -* Syntax highlighter using Pygments -* User's manual -* External/custom stylesheets -* Support for page numbers in header/footer diff --git a/dist-packages/rst2pdf/Contributors.txt b/dist-packages/rst2pdf/Contributors.txt deleted file mode 100644 index 29a4b6776..000000000 --- a/dist-packages/rst2pdf/Contributors.txt +++ /dev/null @@ -1,17 +0,0 @@ -* Roberto Alsina -* Nicolas Laurance -* Christoph Zwerschke -* Yasushi Masuda -* Josh VanderLinden -* Runar Tenfjord -* Patrick Maupin -* Joshua J. Kugler -* Patrick Maupin -* anatoly techtonik -* Joaquin Sorianello -* tyler at datastax.com -* charles at stanhope.com -* al.yazdi -* Dimitri Christodoulou -* Paul Nation -* Robert Johansson diff --git a/dist-packages/rst2pdf/LICENSE.txt b/dist-packages/rst2pdf/LICENSE.txt deleted file mode 100644 index 6b5397b52..000000000 --- a/dist-packages/rst2pdf/LICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2007,2008,2009 Roberto Alsina -Nicolas Laurance, Christoph Zwerschke, Yasushi Masuda, Josh VanderLinden. - - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/dist-packages/rst2pdf/README.rst b/dist-packages/rst2pdf/README.rst deleted file mode 100644 index def219fcf..000000000 --- a/dist-packages/rst2pdf/README.rst +++ /dev/null @@ -1,71 +0,0 @@ -Intro -===== - -The usual way of creating PDF from reStructuredText is by going through LaTeX. -This tool provides an alternative by producing PDF directly using the ReportLab -library. - -More information is available `at the main website`__ - -__ http://rst2pdf.ralsina.me/stories/index.html - -Features -======== - -* User-defined page layout. Multiple frames per page, multiple layouts per - document. - -* Page transitions - -* Cascading stylesheet mechanism, define only what you want changed. - -* Supports TTF and Type1 font embedding. - -* Any number of paragraph styles using the class directive. - -* Any number of character styles using text roles. - -* Custom page sizes and margins. - -* Syntax highlighter for many languages, using Pygments. - -* Supports embedding almost any kind of raster or vector images. - -* Supports hyphenation and kerning (using wordaxe). 
- -* `Full user's manual`__ - -__ http://ralsina.me/static/manual.pdf - -Installation and use -==================== - -Install from PyPI ------------------ - -The latest released version, 0.93, may be installed from PyPI by using -pip or easy_install. It does not support Python 3:: - - sudo pip install rst2pdf - -Install from github --------------------- - -Work on rst2pdf has restarted on github, with the goals of supporting -Python 3, addressing outstanding issues, and not breaking anything. You -can clone the repository and install this version:: - - git clone https://github.com/rst2pdf/rst2pdf my_clone_name - cd my_clone_name - git checkout # if you want something other than master - sudo python setup.py install - -You may want to install it in a virtualenv, but that is beyond the scope -of this readme. - -Quick-start ------------- - -To convert a restructuredText document to a PDF, simply:: - - rst2pdf output.pdf diff --git a/dist-packages/rst2pdf/README.txt b/dist-packages/rst2pdf/README.txt deleted file mode 100644 index 4cc187cea..000000000 --- a/dist-packages/rst2pdf/README.txt +++ /dev/null @@ -1,39 +0,0 @@ -Intro -===== - -The usual way of creating PDF from reStructuredText is by going through LaTeX. -This tool provides an alternative by producing PDF directly using the ReportLab -library. - -Installing -========== - -python setup.py install - -should do the trick. - -Features -======== - -* User-defined page layout. Multiple frames per page, multiple layouts per - document. - -* Page transitions - -* Cascading stylesheet mechanism, define only what you want changed. - -* Supports TTF and Type1 font embedding. - -* Any number of paragraph styles using the class directive. - -* Any number of character styles using text roles. - -* Custom page sizes and margins. - -* Syntax highlighter for many languages, using Pygments. - -* Supports embedding almost any kind of raster or vector images. - -* Supports hyphenation and kerning (using wordaxe). - -* Full user's manual diff --git a/dist-packages/rst2pdf/bootstrap.py b/dist-packages/rst2pdf/bootstrap.py deleted file mode 100644 index a54547774..000000000 --- a/dist-packages/rst2pdf/bootstrap.py +++ /dev/null @@ -1,113 +0,0 @@ -############################################################################## -# -# Copyright (c) 2006 Zope Corporation and Contributors. -# All Rights Reserved. -# -# This software is subject to the provisions of the Zope Public License, -# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. -# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED -# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS -# FOR A PARTICULAR PURPOSE. -# -############################################################################## -"""Bootstrap a buildout-based project - -Simply run this script in a directory containing a buildout.cfg. -The script accepts buildout command-line options, so you can -use the -c option to specify an alternate configuration file. 
- -$Id$ -""" - -import os, shutil, sys, tempfile, urllib2 -from optparse import OptionParser - -tmpeggs = tempfile.mkdtemp() - -is_jython = sys.platform.startswith('java') - -# parsing arguments -parser = OptionParser() -parser.add_option("-v", "--version", dest="version", - help="use a specific zc.buildout version") -parser.add_option("-d", "--distribute", - action="store_true", dest="distribute", default=False, - help="Use Disribute rather than Setuptools.") - -options, args = parser.parse_args() - -if options.version is not None: - VERSION = '==%s' % options.version -else: - VERSION = '' - -USE_DISTRIBUTE = options.distribute -args = args + ['bootstrap'] - -to_reload = False -try: - import pkg_resources - if not hasattr(pkg_resources, '_distribute'): - to_reload = True - raise ImportError -except ImportError: - ez = {} - if USE_DISTRIBUTE: - exec urllib2.urlopen('http://python-distribute.org/distribute_setup.py' - ).read() in ez - ez['use_setuptools'](to_dir=tmpeggs, download_delay=0, no_fake=True) - else: - exec urllib2.urlopen('http://peak.telecommunity.com/dist/ez_setup.py' - ).read() in ez - ez['use_setuptools'](to_dir=tmpeggs, download_delay=0) - - if to_reload: - reload(pkg_resources) - else: - import pkg_resources - -if sys.platform == 'win32': - def quote(c): - if ' ' in c: - return '"%s"' % c # work around spawn lamosity on windows - else: - return c -else: - def quote (c): - return c - -cmd = 'from setuptools.command.easy_install import main; main()' -ws = pkg_resources.working_set - -if USE_DISTRIBUTE: - requirement = 'distribute' -else: - requirement = 'setuptools' - -if is_jython: - import subprocess - - assert subprocess.Popen([sys.executable] + ['-c', quote(cmd), '-mqNxd', - quote(tmpeggs), 'zc.buildout' + VERSION], - env=dict(os.environ, - PYTHONPATH= - ws.find(pkg_resources.Requirement.parse(requirement)).location - ), - ).wait() == 0 - -else: - assert os.spawnle( - os.P_WAIT, sys.executable, quote (sys.executable), - '-c', quote (cmd), '-mqNxd', quote (tmpeggs), 'zc.buildout' + VERSION, - dict(os.environ, - PYTHONPATH= - ws.find(pkg_resources.Requirement.parse(requirement)).location - ), - ) == 0 - -ws.add_entry(tmpeggs) -ws.require('zc.buildout' + VERSION) -import zc.buildout.buildout -zc.buildout.buildout.main(args) -shutil.rmtree(tmpeggs) diff --git a/dist-packages/rst2pdf/buildout.cfg b/dist-packages/rst2pdf/buildout.cfg deleted file mode 100755 index 868697d7b..000000000 --- a/dist-packages/rst2pdf/buildout.cfg +++ /dev/null @@ -1,52 +0,0 @@ -[buildout] -develop = . 
-parts = rst2pdf - - -[rst2pdf] -recipe = zc.recipe.egg:scripts -eggs = rst2pdf [svgsupport, images, aafiguresupport, tests] - sphinx - docutils>0.9 - reportlab - aafigure - nose - coverage - PIL - pdfrw - svg2rlg -find-links = http://distfiles.minitage.org/public/externals/minitage/ - -[rst2pdf-rl24] -recipe = zc.recipe.egg:scripts -eggs = rst2pdf [svgsupport, images, aafiguresupport, tests] - sphinx - docutils - reportlab==2.4 - aafigure - nose - coverage - PIL - pdfrw - svg2rlg -find-links = http://distfiles.minitage.org/public/externals/minitage/ - -[rst2pdf-rl25] -recipe = zc.recipe.egg:scripts -eggs = rst2pdf [svgsupport, images, aafiguresupport, tests] - sphinx - docutils - reportlab==2.5 - aafigure - nose - coverage - PIL - pdfrw - svg2rlg -find-links = http://distfiles.minitage.org/public/externals/minitage/ - -[wordaxe] -recipe = zc.recipe.egg -eggs = wordaxe - pyhyphen -find-links = http://sourceforge.net/projects/deco-cow/files/ diff --git a/dist-packages/rst2pdf/rst2pdf/__init__.py b/dist-packages/rst2pdf/rst2pdf/__init__.py deleted file mode 100644 index 119cd3424..000000000 --- a/dist-packages/rst2pdf/rst2pdf/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# See LICENSE.txt for licensing terms -try: - import pkg_resources - try: - version = pkg_resources.get_distribution('rst2pdf').version - except pkg_resources.ResolutionError: - version = None -except ImportError: - version = None diff --git a/dist-packages/rst2pdf/rst2pdf/aafigure_directive.py b/dist-packages/rst2pdf/rst2pdf/aafigure_directive.py deleted file mode 100644 index 5a0aadea0..000000000 --- a/dist-packages/rst2pdf/rst2pdf/aafigure_directive.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2009 by Leandro Lucarella, Roberto Alsina -# All rights reserved. - -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: - -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. - -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. - -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -from docutils import nodes -from docutils.nodes import General, Inline, Element, literal_block -from docutils.parsers.rst import directives -from docutils.parsers.rst import nodes -from reportlab.graphics import renderPDF -from docutils.parsers import rst -from opt_imports import aafigure -from log import log - -WARNED=False - -class Aanode(Element): - children = () - - def __init__(self, content, options, rawsource='', *children, **attributes): - self.content = content - self.options = options - Element.__init__(self, rawsource, *children, **attributes) - - def copy(self, **attributes): - return Aanode(self.content, self.options, **self.attributes) - - def gen_flowable(self, style_options): - options = dict(style_options) - # explicit :option: always precedes - options.update(self.options) - visitor = aafigure.process( - '\n'.join(self.content), - aafigure.pdf.PDFOutputVisitor, - options=options) - return renderPDF.GraphicsFlowable(visitor.drawing) - - -class Aafig(rst.Directive): - """ - Directive to insert an ASCII art figure to be rendered by aafigure. - """ - has_content = True - required_arguments = 0 - optional_arguments = 0 - final_argument_whitespace = False - option_spec = dict( - scale = float, - line_width = float, - background = str, - foreground = str, - fill = str, - name = str, - aspect = float, - textual = directives.flag, - proportional = directives.flag, - ) - - def run(self): - global WARNED - if 'textual' in self.options: - self.options['textual'] = True - if 'proportional' in self.options: - self.options['proportional'] = True - if aafigure is not None: - return [Aanode(self.content, self.options)] - if not WARNED: - log.error('To render the aafigure directive correctly, please install aafigure') - WARNED=True - return [literal_block(text='\n'.join(self.content))] - - -directives.register_directive('aafig', Aafig) -directives.register_directive('aafigure', Aafig) diff --git a/dist-packages/rst2pdf/rst2pdf/basenodehandler.py b/dist-packages/rst2pdf/rst2pdf/basenodehandler.py deleted file mode 100644 index d757bea1b..000000000 --- a/dist-packages/rst2pdf/rst2pdf/basenodehandler.py +++ /dev/null @@ -1,289 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms -#$URL$ -#$Date$ -#$Revision$ - -''' -This module provides one useful class: NodeHandler - -The NodeHandler class is designed to be subclassed. Each subclass -should support the processing that createpdf.RstToPdf needs to do -on a particular type of node that could appear in a document tree. - -When the subclass is defined, it should reference NodeHandler as -the first base class, and one or more docutils node classes as -subsequent base classes. - -These docutils node classes will not actually wind up in the -base classes of the subclass. Instead, they will be used as -keys in a dispatch dictionary which is used to find the correct -NodeHandler subclass to use to process an instance of a given -docutils node class. - -When an instance of createpdf.RstToPdf is created, a NodeHandler -instance will be called to return dispatchers for gather_elements -and gather_pdftext, wrapped up as methods of the createpdf.RstToPdf -class. - -When a dispatcher is called, it will dispatch to the correct subclass -to handle the given docutils node instance. - -If no NodeHandler subclass has been created to handle that particular -type of docutils node, then default processing will occur and a warning -will be logged. 
-''' - -import types -import inspect -from log import log, nodeid -from smartypants import smartyPants -import docutils.nodes -from flowables import BoundByWidth, TocEntry - -class MetaHelper(type): - ''' MetaHelper is designed to generically enable a few of the benefits of - using metaclasses by encapsulating some of the complexity of setting - them up. - - If a base class uses MetaHelper (by assigning __metaclass__ = MetaHelper), - then that class (and its metaclass inheriting subclasses) can control - class creation behavior by defining a couple of helper functions. - - 1) A base class can define a _classpreinit function. This function - is called during __new__ processing of the class object itself, - but only during subclass creation (not when the class defining - the _classpreinit is itself created). - - The subclass object does not yet exist at the time _classpreinit - is called. _classpreinit accepts all the parameters of the - __new__ function for the class itself (not the same as the __new__ - function for the instantiation of class objects!) and must return - a tuple of the same objects. A typical use of this would be to - modify the class bases before class creation. - - 2) Either a base class or a subclass can define a _classinit() function. - This function will be called immediately after the actual class has - been created, and can do whatever setup is required for the class. - Note that every base class (but not every subclass) which uses - MetaHelper MUST define _classinit, even if that definition is None. - - MetaHelper also places an attribute into each class created with it. - _baseclass is set to None if this class has no superclasses which - also use MetaHelper, or to the first such MetaHelper-using baseclass. - _baseclass can be explicitly set inside the class definition, in - which case MetaHelper will not override it. - ''' - def __new__(clstype, name, bases, clsdict): - # Our base class is the first base in the class definition which - # uses MetaHelper, or None if no such base exists. - base = ([x for x in bases if type(x) is MetaHelper] + [None])[0] - - # Only set our base into the class if it has not been explicitly - # set - clsdict.setdefault('_baseclass', base) - - # See if the base class definied a preinit function, and call it - # if so. - preinit = getattr(base, '_classpreinit', None) - if preinit is not None: - clstype, name, bases, clsdict = preinit(clstype, name, bases, clsdict) - - # Delegate the real work to type - return type.__new__(clstype, name, bases, clsdict) - - def __init__(cls, name, bases, clsdict): - # Let type build the class for us - type.__init__(cls, name, bases, clsdict) - # Call the class's initialization function if defined - if cls._classinit is not None: - cls._classinit() - - -class NodeHandler(object): - ''' NodeHandler classes are used to dispatch - to the correct class to handle some node class - type, via a dispatchdict in the main class. - ''' - __metaclass__ = MetaHelper - - @classmethod - def _classpreinit(baseclass, clstype, name, bases, clsdict): - # _classpreinit is called before the actual class is built - # Perform triage on the class bases to separate actual - # inheritable bases from the target docutils node classes - # which we want to dispatch for. 
- - new_bases = [] - targets = [] - for target in bases: - if target is not object: - (targets, new_bases)[issubclass(target, NodeHandler)].append(target) - clsdict['_targets'] = targets - return clstype, name, tuple(new_bases), clsdict - - @classmethod - def _classinit(cls): - # _classinit() is called once the subclass has actually - # been created. - - # For the base class, just add a dispatch dictionary - if cls._baseclass is None: - cls.dispatchdict = {} - return - - # for subclasses, instantiate them, and then add - # the class to the dispatch dictionary for each of its targets. - self = cls() - for target in cls._targets: - if cls.dispatchdict.setdefault(target, self) is not self: - t = repr(target) - old = repr(cls.dispatchdict[target]) - new = repr(self) - log.debug('Dispatch handler %s for node type %s overridden by %s' % - (old, t, new)) - cls.dispatchdict[target] = self - - @staticmethod - def getclassname(obj): - cln = repr(obj.__class__) - info = cln.split("'") - if len(info) == 3: - return info[1] - return cln - - def log_unknown(self, node, during): - if not hasattr(self, 'unkn_node'): - self.unkn_node = set() - cln=self.getclassname(node) - if not cln in self.unkn_node: - self.unkn_node.add(cln) - log.warning("Unkn. node (self.%s): %s [%s]", - during, cln, nodeid(node)) - try: - log.debug(node) - except (UnicodeDecodeError, UnicodeEncodeError): - log.debug(repr(node)) - - def findsubclass(self, node, during): - handlerinfo = '%s.%s' % (self.getclassname(self), during) - log.debug("%s: %s", handlerinfo, self.getclassname(node)) - log.debug("%s: [%s]", handlerinfo, nodeid(node)) - try: - log.debug("%s: %s", handlerinfo, node) - except (UnicodeDecodeError, UnicodeEncodeError): - log.debug("%s: %r", handlerninfo, node) - log.debug("") - - # Dispatch to the first matching class in the MRO - - dispatchdict = self.dispatchdict - for baseclass in inspect.getmro(node.__class__): - result = dispatchdict.get(baseclass) - if result is not None: - break - else: - self.log_unknown(node, during) - result = self - return result - - def __call__(self, client): - ''' Get the dispatchers, wrapped up as methods for the client''' - textdispatch = types.MethodType(self.textdispatch, client) - elemdispatch = types.MethodType(self.elemdispatch, client) - return textdispatch, elemdispatch - - # This overridable attribute will be set true in the instance - # if handling a sphinx document - - sphinxmode = False - - # Begin overridable attributes and methods for elemdispatch - - def gather_elements(self, client, node, style): - return client.gather_elements(node, style=style) - - def getstyle(self, client, node, style): - try: - if node['classes'] and node['classes'][0]: - # FIXME: Supports only one class, sorry ;-) - if client.styles.StyleSheet.has_key(node['classes'][0]): - style = client.styles[node['classes'][0]] - else: - log.info("Unknown class %s, ignoring. 
[%s]", - node['classes'][0], nodeid(node)) - except TypeError: # Happens when a docutils.node.Text reaches here - pass - - if style is None or style == client.styles['bodytext']: - style = client.styles.styleForNode(node) - return style - - def getelements(self, client, node, style): - style = self.getstyle(client, node, style) - elements = self.gather_elements(client, node, style) - - # Make all the sidebar cruft unreachable - #if style.__dict__.get('float','None').lower() !='none': - #node.elements=[Sidebar(node.elements,style)] - #elif 'width' in style.__dict__: - - if 'width' in style.__dict__: - elements = [BoundByWidth(style.width, - elements, style, mode="shrink")] - - return elements - - # End overridable attributes and methods for elemdispatch - - def elemdispatch(self, client, node, style=None): - self = self.findsubclass(node, 'elemdispatch') - - # set anchors for internal references - try: - for i in node['ids']: - client.pending_targets.append(i) - except TypeError: #Happens with docutils.node.Text - pass - - elements = self.getelements(client, node, style) - - if node.line and client.debugLinesPdf: - elements.insert(0,TocEntry(client.depth-1,'LINE-%s'%node.line)) - node.elements = elements - return elements - - # Begin overridable attributes and methods for textdispatch - - pre = '' - post = '' - - def get_pre_post(self, client, node, replaceEnt): - return self.pre, self.post - - def get_text(self, client, node, replaceEnt): - return client.gather_pdftext(node, replaceEnt=replaceEnt) - - def apply_smartypants(self, text, smarty, node): - # Try to be clever about when to use smartypants - if node.__class__ in (docutils.nodes.paragraph, - docutils.nodes.block_quote, docutils.nodes.title): - return smartyPants(text, smarty) - return text - - # End overridable attributes and methods for textdispatch - - def textdispatch(self, client, node, replaceEnt=True): - self = self.findsubclass(node, 'textdispatch') - pre, post = self.get_pre_post(client, node, replaceEnt) - text = self.get_text(client, node, replaceEnt) - text = pre + text + post - - try: - log.debug("%s.textdispatch: %s" % (self.getclassname(self), text)) - except UnicodeDecodeError: - pass - - text = self.apply_smartypants(text, client.smarty, node) - node.pdftext = text - return text diff --git a/dist-packages/rst2pdf/rst2pdf/config.py b/dist-packages/rst2pdf/rst2pdf/config.py deleted file mode 100644 index bd9ca2c07..000000000 --- a/dist-packages/rst2pdf/rst2pdf/config.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms -"""Singleton config object""" - - -import ConfigParser -import os -from rst2pdf.rson import loads - -cfdir = os.path.join(os.path.expanduser('~'), '.rst2pdf') -cfname = os.path.join(cfdir, 'config') - - -def getValue(section, key, default=None): - section = section.lower() - key = key.lower() - try: - return loads(conf.get(section, key)) - except Exception: - return default - - -class ConfigError(Exception): - - def __init__(self, modulename, msg): - self.modulename = modulename - self.msg = msg - -conf = ConfigParser.SafeConfigParser() - -def parseConfig(extracf=None): - global conf - cflist = ["/etc/rst2pdf.conf", cfname] - if extracf: - cflist.append(extracf) - conf = ConfigParser.SafeConfigParser() - conf.read(cflist) - -parseConfig() diff --git a/dist-packages/rst2pdf/rst2pdf/counter_role.py b/dist-packages/rst2pdf/rst2pdf/counter_role.py deleted file mode 100644 index 43b832bd8..000000000 --- a/dist-packages/rst2pdf/rst2pdf/counter_role.py +++ 
/dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- - -from docutils.nodes import Text, target - -values = {} - - -class CounterNode(Text): - children = () - def __init__(self, data, rawsource=''): - if ':' in data: - self.name, value = [s.lower() for s in data.split(':')][:2] - self.value=int(value) - else: - self.name=data.lower() - self.value=values.get(self.name,1) - values[self.name]=self.value+1 - - def astext(self): - return unicode(self.value) - -def counter_fn(name, rawtext, text, lineno, inliner, options={}, content=[]): - n=CounterNode(text) - s='%s-%s'%(n.name, n.value) - return [target(ids=[s]),n], [] - -counter_fn.content=True - -from docutils.parsers.rst import roles -roles.register_canonical_role('counter', counter_fn) diff --git a/dist-packages/rst2pdf/rst2pdf/createpdf.py b/dist-packages/rst2pdf/rst2pdf/createpdf.py deleted file mode 100644 index be1e765b0..000000000 --- a/dist-packages/rst2pdf/rst2pdf/createpdf.py +++ /dev/null @@ -1,1687 +0,0 @@ -# -*- coding: utf-8 -*- - -#$URL$ -#$Date$ -#$Revision$ - -# See LICENSE.txt for licensing terms - -# Some fragments of code are copied from Reportlab under this license: -# -##################################################################################### -# -# Copyright (c) 2000-2008, ReportLab Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# * Neither the name of the company nor the names of its contributors may be -# used to endorse or promote products derived from this software without -# specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -# IN NO EVENT SHALL THE OFFICERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; -# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER -# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. 
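The counter role deleted above keeps a module-level values dict keyed by counter name, so repeated uses of the same name yield 1, 2, 3, and a name:N form sets the value explicitly; each use also emits a target node with an id of the form name-value. A minimal sketch of exercising the role from reStructuredText (the reST input is made up for illustration; importing counter_role is what registers the role with docutils, and the deleted module is Python 2 era code):

    # Illustrative only: the :counter: role registered by rst2pdf.counter_role.
    from docutils.core import publish_doctree
    from rst2pdf import counter_role  # imported for its side effect: role registration

    text = """\
    Figure :counter:`figure`. First figure.

    Figure :counter:`figure`. Second figure, numbered one higher.

    Figure :counter:`figure:10`. Sets the 'figure' counter to 10.
    """

    # Each role use becomes a target plus a CounterNode carrying the number.
    doctree = publish_doctree(text)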
-# -##################################################################################### - - -__docformat__ = 'reStructuredText' - -# Import Psyco if available -from opt_imports import psyco -psyco.full() - -import sys -import os -import tempfile -import re -import string -import config -import logging -from cStringIO import StringIO -from os.path import abspath, dirname, expanduser, join -from urlparse import urljoin, urlparse, urlunparse -from copy import copy, deepcopy -from optparse import OptionParser -from pprint import pprint -from xml.sax.saxutils import unescape, escape - -import docutils.readers.doctree -import docutils.core -import docutils.nodes -from docutils.parsers.rst import directives -from docutils.readers import standalone -from docutils.transforms import Transform - -try: - from roman import toRoman -except ImportError: - from docutils.utils.roman import toRoman - -from reportlab.platypus import * -from reportlab.platypus.doctemplate import IndexingFlowable -from reportlab.platypus.flowables import _listWrapOn, _Container -from reportlab.pdfbase.pdfdoc import PDFPageLabel -#from reportlab.lib.enums import * -#from reportlab.lib.units import * -#from reportlab.lib.pagesizes import * - -from rst2pdf import counter_role, oddeven_directive -from rst2pdf import pygments_code_block_directive # code-block directive -from rst2pdf import flowables -from rst2pdf.flowables import * # our own reportlab flowables -from rst2pdf.sinker import Sinker -from rst2pdf.image import MyImage, missing -from rst2pdf.aafigure_directive import Aanode -from rst2pdf.log import log, nodeid -from rst2pdf.smartypants import smartyPants -from rst2pdf import styles as sty -from rst2pdf.nodehandlers import nodehandlers -from rst2pdf.languages import get_language_available -from rst2pdf.opt_imports import Paragraph, BaseHyphenator, PyHyphenHyphenator, \ - DCWHyphenator, sphinx as sphinx_module, wordaxe - -# Small template engine for covers -# The obvious import doesn't work for complicated reasons ;-) -from rst2pdf import tenjin -to_str = tenjin.helpers.generate_tostrfunc('utf-8') -escape = tenjin.helpers.escape -templateEngine = tenjin.Engine() - -def renderTemplate(tname, **context): - context['to_str'] = to_str - context['escape'] = escape - return templateEngine.render(tname, context) - -#def escape (x,y): -# "Dummy escape function to test for excessive escaping" -# return x - -numberingstyles={ 'arabic': 'ARABIC', - 'roman': 'ROMAN_UPPER', - 'lowerroman': 'ROMAN_LOWER', - 'alpha': 'LETTERS_UPPER', - 'loweralpha': 'LETTERS_LOWER' } - - -class RstToPdf(object): - - def __init__(self, stylesheets=[], - language='en_US', - header=None, - footer=None, - inlinelinks=False, - breaklevel=1, - font_path=[], - style_path=[], - fit_mode='shrink', - background_fit_mode='center', - sphinx=False, - smarty='0', - baseurl=None, - repeat_table_rows=False, - footnote_backlinks=True, - inline_footnotes=False, - real_footnotes=False, - def_dpi=300, - show_frame=False, - highlightlang='python', # this one is only used by Sphinx - basedir=os.getcwd(), - splittables=False, - blank_first_page=False, - first_page_on_right=False, - breakside='odd', - custom_cover='cover.tmpl', - floating_images=False, - numbered_links=False, - section_header_depth=2, - raw_html=False, - strip_elements_with_classes=[] - ): - self.debugLinesPdf=False - self.depth=0 - self.breakside=breakside - self.first_page_on_right=first_page_on_right - self.blank_first_page=blank_first_page - self.splittables=splittables - self.basedir=basedir - 
self.language, self.docutils_language = get_language_available( - language)[:2] - self.doc_title = "" - self.doc_title_clean = "" - self.doc_subtitle = "" - self.doc_author = "" - self.header = header - self.footer = footer - self.custom_cover=custom_cover - self.floating_images=floating_images - self.decoration = {'header': header, - 'footer': footer, - 'endnotes': [], - 'extraflowables':[]} - # find base path - if hasattr(sys, 'frozen'): - self.PATH = abspath(dirname(sys.executable)) - else: - self.PATH = abspath(dirname(__file__)) - - - self.font_path=font_path - self.style_path=style_path - self.def_dpi=def_dpi - self.loadStyles(stylesheets) - - self.docutils_languages = {} - self.inlinelinks = inlinelinks - self.breaklevel = breaklevel - self.fit_mode = fit_mode - self.background_fit_mode = background_fit_mode - self.to_unlink = [] - self.smarty = smarty - self.baseurl = baseurl - self.repeat_table_rows = repeat_table_rows - self.footnote_backlinks = footnote_backlinks - self.inline_footnotes = inline_footnotes - self.real_footnotes = real_footnotes - # Real footnotes are always a two-pass thing. - if self.real_footnotes: - self.mustMultiBuild = True - self.def_dpi = def_dpi - self.show_frame = show_frame - self.numbered_links = numbered_links - self.section_header_depth = section_header_depth - self.img_dir = os.path.join(self.PATH, 'images') - self.raw_html = raw_html - self.strip_elements_with_classes = strip_elements_with_classes - - # Sorry about this, but importing sphinx.roles makes some - # ordinary documents fail (demo.txt specifically) so - # I can' t just try to import it outside. I need - # to do it only if it's requested - if sphinx and sphinx_module: - import sphinx.roles - from sphinxnodes import sphinxhandlers - self.highlightlang = highlightlang - self.gen_pdftext, self.gen_elements = sphinxhandlers(self) - else: - # These rst2pdf extensions conflict with sphinx - directives.register_directive('code-block', pygments_code_block_directive.code_block_directive) - directives.register_directive('code', pygments_code_block_directive.code_block_directive) - import math_directive - self.gen_pdftext, self.gen_elements = nodehandlers(self) - - self.sphinx = sphinx - - if not self.styles.languages: - self.styles.languages=[] - if self.language: - self.styles.languages.append(self.language) - self.styles['bodytext'].language = self.language - else: - self.styles.languages.append('en_US') - self.styles['bodytext'].language = 'en_US' - # Load the docutils language modules for all required languages - for lang in self.styles.languages: - self.docutils_languages[lang] = get_language_available(lang)[2] - - # Load the hyphenators for all required languages - if wordaxe is not None: - for lang in self.styles.languages: - if lang.split('_', 1)[0] == 'de': - try: - wordaxe.hyphRegistry[lang] = DCWHyphenator('de', 5) - continue - except Exception: - # hyphenators may not always be available or crash, - # e.g. 
wordaxe issue 2809074 (http://is.gd/16lqs) - log.warning("Can't load wordaxe DCW hyphenator" - " for German language, trying Py hyphenator instead") - else: - continue - try: - wordaxe.hyphRegistry[lang] = PyHyphenHyphenator(lang) - except Exception: - log.warning("Can't load wordaxe Py hyphenator" - " for language %s, trying base hyphenator", lang) - else: - continue - try: - wordaxe.hyphRegistry[lang] = BaseHyphenator(lang) - except Exception: - log.warning("Can't even load wordaxe base hyphenator") - log.info('hyphenation by default in %s , loaded %s', - self.styles['bodytext'].language, - ','.join(self.styles.languages)) - - self.pending_targets=[] - self.targets=[] - - def loadStyles(self, styleSheets=None ): - - if styleSheets is None: - styleSheets=[] - - self.styles = sty.StyleSheet(styleSheets, - self.font_path, - self.style_path, - def_dpi=self.def_dpi) - - def style_language(self, style): - """Return language corresponding to this style.""" - try: - return style.language - except AttributeError: - pass - try: - return self.styles['bodytext'].language - except AttributeError: - # FIXME: this is pretty arbitrary, and will - # probably not do what you want. - # however, it should only happen if: - # * You specified the language of a style - # * Have no wordaxe installed. - # Since it only affects hyphenation, and wordaxe is - # not installed, t should have no effect whatsoever - return os.environ['LANG'] or 'en' - - def text_for_label(self, label, style): - """Translate text for label.""" - try: - text = self.docutils_languages[ - self.style_language(style)].labels[label] - except KeyError: - text = label.capitalize() - return text - - def text_for_bib_field(self, field, style): - """Translate text for bibliographic fields.""" - try: - text = self.docutils_languages[ - self.style_language(style)].bibliographic_fields[field] - except KeyError: - text = field - return text + ":" - - def author_separator(self, style): - """Return separator string for authors.""" - try: - sep = self.docutils_languages[ - self.style_language(style)].author_separators[0] - except KeyError: - sep = ';' - return sep + " " - - def styleToTags(self, style): - '''Takes a style name, returns a pair of opening/closing tags for it, like - "". Used for inline - nodes (custom interpreted roles)''' - - try: - s = self.styles[style] - r1=['') - r2=[''] - - if s.strike: - r1.append('') - r2.insert(0,'') - if s.underline: - r1.append('') - r2.insert(0,'') - - return [''.join(r1), ''.join(r2)] - except KeyError: - log.warning('Unknown class %s', style) - return None - - - def styleToFont(self, style): - '''Takes a style name, returns a font tag for it, like - "". Used for inline - nodes (custom interpreted roles)''' - - try: - s = self.styles[style] - r=['') - return ''.join(r) - except KeyError: - log.warning('Unknown class %s', style) - return None - - def gather_pdftext(self, node, replaceEnt=True): - return ''.join([self.gen_pdftext(n, replaceEnt) - for n in node.children]) - - def gather_elements(self, node, style=None): - if style is None: - style = self.styles.styleForNode(node) - r = [] - if 'float' in style.__dict__: - style = None # Don't pass floating styles to children! 
- for n in node.children: - # import pdb; pdb.set_trace() - r.extend(self.gen_elements(n, style=style)) - return r - - def bullet_for_node(self, node): - """Takes a node, assumes it's some sort of - item whose parent is a list, and - returns the bullet text it should have""" - b = "" - t = 'item' - if node.parent.get('start'): - start = int(node.parent.get('start')) - else: - start = 1 - - if node.parent.get('bullet') or isinstance( - node.parent, docutils.nodes.bullet_list): - b = node.parent.get('bullet', '*') - if b == "None": - b = "" - t = 'bullet' - - elif node.parent.get('enumtype') == 'arabic': - b = str(node.parent.children.index(node) + start) + '.' - - elif node.parent.get('enumtype') == 'lowerroman': - b = toRoman(node.parent.children.index(node) + start).lower() + '.' - elif node.parent.get('enumtype') == 'upperroman': - b = toRoman(node.parent.children.index(node) + start).upper() + '.' - elif node.parent.get('enumtype') == 'loweralpha': - b = string.lowercase[node.parent.children.index(node) - + start - 1] + '.' - elif node.parent.get('enumtype') == 'upperalpha': - b = string.uppercase[node.parent.children.index(node) - + start - 1] + '.' - else: - log.critical("Unknown kind of list_item %s [%s]", - node.parent, nodeid(node)) - return b, t - - def filltable(self, rows): - """ - Takes a list of rows, consisting of cells and performs the following fixes: - - * For multicolumn cells, add continuation cells, to make all rows the same - size. These cells have to be multirow if the original cell is multirow. - - * For multirow cell, insert continuation cells, to make all columns the - same size. - - * If there are still shorter rows, add empty cells at the end (ReST quirk) - - * Once the table is *normalized*, create spans list, fitting for reportlab's - Table class. 
- - """ - - # If there is a multicol cell, we need to insert Continuation Cells - # to make all rows the same length - - #from pudb import set_trace; set_trace() - - for y in range(0, len(rows)): - for x in range(len(rows[y])-1, -1, -1): - cell = rows[y][x] - if isinstance(cell, str): - continue - if cell.get("morecols"): - for i in range(0, cell.get("morecols")): - e=docutils.nodes.entry("") - e["morerows"] = cell.get("morerows",0) - rows[y].insert(x + 1, e) - - for y in range(0, len(rows)): - for x in range(0, len(rows[y])): - cell = rows[y][x] - if isinstance(cell, str): - continue - if cell.get("morerows"): - for i in range(0, cell.get("morerows")): - rows[y + i + 1].insert(x, "") - - - # If a row is shorter, add empty cells at the right end - maxw = max([len(r) for r in rows]) - for r in rows: - while len(r) < maxw: - r.append("") - - # Create spans list for reportlab's table style - spans = [] - for y in range(0, len(rows)): - for x in range(0, len(rows[y])): - cell = rows[y][x] - if isinstance(cell, str): - continue - if cell.get("morecols"): - mc = cell.get("morecols") - else: - mc = 0 - if cell.get("morerows"): - mr = cell.get("morerows") - else: - mr = 0 - if mc or mr: - spans.append(('SPAN', (x, y), (x + mc, y + mr))) - return spans - - def PreformattedFit(self, text, style): - """Preformatted section that gets horizontally compressed if needed.""" - # Pass a ridiculous size, then it will shrink to what's available - # in the frame - return BoundByWidth(2000*cm, - content=[XXPreformatted(text, style)], - mode=self.fit_mode, style=style) - - def createPdf(self, text=None, - source_path=None, - output=None, - doctree=None, - compressed=False, - # This adds entries to the PDF TOC - # matching the rst source lines - debugLinesPdf=False): - """Create a PDF from text (ReST input), - or doctree (docutil nodes) and save it in outfile. - - If outfile is a string, it's a filename. - If it's something with a write method, (like a StringIO, - or a file object), the data is saved there. 
- - """ - self.decoration = {'header': self.header, - 'footer': self.footer, - 'endnotes': [], - 'extraflowables': []} - - self.pending_targets=[] - self.targets=[] - - self.debugLinesPdf = debugLinesPdf - - if doctree is None: - if text is not None: - if self.language: - settings_overrides={'language_code': self.docutils_language} - else: - settings_overrides={} - settings_overrides['strip_elements_with_classes']=self.strip_elements_with_classes - self.doctree = docutils.core.publish_doctree(text, - source_path=source_path, - settings_overrides=settings_overrides) - #import pdb; pdb.set_trace() - log.debug(self.doctree) - else: - log.error('Error: createPdf needs a text or a doctree') - return - else: - self.doctree = doctree - - if self.numbered_links: - # Transform all links to sections so they show numbers - from sectnumlinks import SectNumFolder, SectRefExpander - snf = SectNumFolder(self.doctree) - self.doctree.walk(snf) - srf = SectRefExpander(self.doctree, snf.sectnums) - self.doctree.walk(srf) - if self.strip_elements_with_classes: - from docutils.transforms.universal import StripClassesAndElements - sce = StripClassesAndElements(self.doctree) - sce.apply() - - elements = self.gen_elements(self.doctree) - - # Find cover template, save it in cover_file - def find_cover(name): - cover_path=[self.basedir, os.path.expanduser('~/.rst2pdf'), - os.path.join(self.PATH,'templates')] - cover_file=None - for d in cover_path: - if os.path.exists(os.path.join(d,name)): - cover_file=os.path.join(d,name) - break - return cover_file - - cover_file=find_cover(self.custom_cover) - if cover_file is None: - log.error("Can't find cover template %s, using default"%self.custom_cover) - cover_file=find_cover('cover.tmpl') - - # Feed data to the template, get restructured text. - cover_text = renderTemplate(tname=cover_file, - title=self.doc_title, - subtitle=self.doc_subtitle - ) - - # This crashes sphinx because .. class:: in sphinx is - # something else. Ergo, pdfbuilder does it in its own way. 
- if not self.sphinx: - - elements = self.gen_elements( - publish_secondary_doctree(cover_text, self.doctree, source_path)) + elements - - if self.blank_first_page: - elements.insert(0,PageBreak()) - - # Put the endnotes at the end ;-) - endnotes = self.decoration['endnotes'] - if endnotes: - elements.append(MySpacer(1, 2*cm)) - elements.append(Separation()) - for n in self.decoration['endnotes']: - t_style = TableStyle(self.styles['endnote'].commands) - colWidths = self.styles['endnote'].colWidths - elements.append(DelayedTable([[n[0], n[1]]], - style=t_style, colWidths=colWidths)) - - if self.floating_images: - #from pudb import set_trace; set_trace() - # Handle images with alignment more like in HTML - new_elem=[] - for i,e in enumerate(elements[::-1]): - if (isinstance (e, MyImage) and e.image.hAlign != 'CENTER' - and new_elem): - # This is an image where flowables should wrap - # around it - popped=new_elem.pop() - new_elem.append(ImageAndFlowables(e,popped, - imageSide=e.image.hAlign.lower())) - else: - new_elem.append(e) - - elements = new_elem - elements.reverse() - - # Containers now result in elements being a nested list - # We need to undo that here - def flatten(lis): - """Given a list, possibly nested to any level, return it flattened.""" - new_lis = [] - for item in lis: - if type(item) == type([]): - new_lis.extend(flatten(item)) - else: - new_lis.append(item) - return new_lis - elements = flatten(elements) - - head = self.decoration['header'] - foot = self.decoration['footer'] - - # So, now, create the FancyPage with the right sizes and elements - FP = FancyPage("fancypage", head, foot, self) - - def cleantags(s): - re.sub(r'<[^>]*?>', '', - unicode(s).strip()) - - pdfdoc = FancyDocTemplate( - output, - pageTemplates=[FP], - showBoundary=0, - pagesize=self.styles.ps, - title=self.doc_title_clean, - author=self.doc_author, - pageCompression=compressed) - pdfdoc.client =self - - if getattr(self, 'mustMultiBuild', False): - # Force a multibuild pass - if not isinstance(elements[-1],UnhappyOnce): - log.info ('Forcing second pass so Total pages work') - elements.append(UnhappyOnce()) - while True: - try: - log.info("Starting build") - # See if this *must* be multipass - pdfdoc.multiBuild(elements) - # Force a multibuild pass - - # FIXME: since mustMultiBuild is set by the - # first pass in the case of ###Total###, then we - # make a new forced two-pass build. This is broken. - # conceptually. - - if getattr(self, 'mustMultiBuild', False): - # Force a multibuild pass - if not isinstance(elements[-1],UnhappyOnce): - log.info ('Forcing second pass so Total pages work') - elements.append(UnhappyOnce()) - continue - ## Rearrange footnotes if needed - if self.real_footnotes: - newStory=[] - fnPile=[] - for e in elements: - if getattr(e,'isFootnote',False): - # Add it to the pile - #if not isinstance (e, MySpacer): - fnPile.append(e) - elif getattr(e, '_atTop', False) or isinstance( - e, (UnhappyOnce, MyPageBreak)): - if fnPile: - fnPile.insert(0, Separation()) - newStory.append(Sinker(fnPile)) - newStory.append(e) - fnPile=[] - else: - newStory.append(e) - elements = newStory+fnPile - for e in elements: - if hasattr(e, '_postponed'): - delattr(e,'_postponed') - self.real_footnotes = False - continue - - - - break - except ValueError, v: - # FIXME: cross-document links come through here, which means - # an extra pass per cross-document reference. Which sucks. 
- #if v.args and str(v.args[0]).startswith('format not resolved'): - #missing=str(v.args[0]).split(' ')[-1] - #log.error('Adding missing reference to %s and rebuilding. This is slow!'%missing) - #elements.append(Reference(missing)) - #for e in elements: - #if hasattr(e,'_postponed'): - #delattr(e,'_postponed') - #else: - #raise - raise - - #doc = SimpleDocTemplate("phello.pdf") - #doc.build(elements) - for fn in self.to_unlink: - try: - os.unlink(fn) - except OSError: - pass - - -from reportlab.platypus import doctemplate - -class FancyDocTemplate(BaseDocTemplate): - - def afterFlowable(self, flowable): - - if isinstance(flowable, Heading): - # Notify TOC entry for headings/abstracts/dedications. - level, text = flowable.level, flowable.text - parent_id = flowable.parent_id - node = flowable.node - pagenum = setPageCounter() - self.notify('TOCEntry', (level, text, pagenum, parent_id, node)) - - - def handle_flowable(self,flowables): - '''try to handle one flowable from the front of list flowables.''' - - # this method is copied from reportlab - - #allow document a chance to look at, modify or ignore - #the object(s) about to be processed - self.filterFlowables(flowables) - - self.handle_breakBefore(flowables) - self.handle_keepWithNext(flowables) - f = flowables[0] - del flowables[0] - if f is None: - return - - if isinstance(f,PageBreak): - if isinstance(f,SlowPageBreak): - self.handle_pageBreak(slow=1) - else: - self.handle_pageBreak() - self.afterFlowable(f) - elif isinstance(f,ActionFlowable): - f.apply(self) - self.afterFlowable(f) - else: - frame = self.frame - canv = self.canv - #try to fit it then draw it - if frame.add(f, canv, trySplit=self.allowSplitting): - if not isinstance(f,FrameActionFlowable): - self._curPageFlowableCount += 1 - self.afterFlowable(f) - doctemplate._addGeneratedContent(flowables,frame) - else: - if self.allowSplitting: - # see if this is a splittable thing - S = frame.split(f,canv) - n = len(S) - else: - n = 0 - if n: - # I'm not 100% sure that adding 'ImageAndFlowables' here is teh right thing to do, - # As it results in an extra linne space when flowing around the bottom of an image - # But I think this is more likely an issue in ReportLab, and without the change - # We get the 'Splitting error' stack-trace - if not isinstance(S[0],(PageBreak,SlowPageBreak,ActionFlowable,ImageAndFlowables)): - if frame.add(S[0], canv, trySplit=0): - self._curPageFlowableCount += 1 - self.afterFlowable(S[0]) - doctemplate._addGeneratedContent(flowables,frame) - else: - ident = "Splitting error(n==%d) on page %d in\n%s" % ( - n, self.page, self._fIdent(f, 60, frame)) - #leave to keep apart from the raise - raise LayoutError(ident) - del S[0] - for i,f in enumerate(S): - flowables.insert(i,f) # put split flowables back on the list - else: - if hasattr(f,'_postponed') and f._postponed > 4: - ident = "Flowable %s%s too large on page %d in frame %r%s of template %r" % ( - self._fIdent(f, 60, frame), doctemplate._fSizeString(f),self.page, - self.frame.id, self.frame._aSpaceString(), self.pageTemplate.id) - #leave to keep apart from the raise - raise LayoutError(ident) - # this ought to be cleared when they are finally drawn! 
- f._postponed = 1 - mbe = getattr(self, '_multiBuildEdits', None) - if mbe: - mbe((delattr, f, '_postponed')) - flowables.insert(0, f) # put the flowable back - self.handle_frameEnd() - - -_counter=0 -_counterStyle='arabic' - -class PageCounter(Flowable): - - def __init__(self, number=0, style='arabic'): - self.style=str(style).lower() - self.number=int(number) - Flowable.__init__(self) - - def wrap(self, availWidth, availHeight): - global _counter, _counterStyle - _counterStyle=self.style - _counter=self.number - return (self.width, self.height) - - def drawOn(self, canvas, x, y, _sW): - pass - -flowables.PageCounter = PageCounter - -def setPageCounter(counter=None, style=None): - - global _counter, _counterStyle - - if counter is not None: - _counter = counter - if style is not None: - _counterStyle = style - - if _counterStyle=='lowerroman': - ptext=toRoman(_counter).lower() - elif _counterStyle=='roman': - ptext=toRoman(_counter).upper() - elif _counterStyle=='alpha': - ptext=string.uppercase[_counter%26] - elif _counterStyle=='loweralpha': - ptext=string.lowercase[_counter%26] - else: - ptext=unicode(_counter) - return ptext - -class MyContainer(_Container, Flowable): - pass - -class UnhappyOnce(IndexingFlowable): - '''An indexing flowable that is only unsatisfied once. - If added to a story, it will make multiBuild run - at least two passes. Useful for ###Total###''' - _unhappy=True - def isSatisfied(self): - if self._unhappy: - self._unhappy= False - return False - return True - - def draw(self): - pass - -class HeaderOrFooter(object): - """ A helper object for FancyPage (below) - HeaderOrFooter handles operations which are common - to both headers and footers - """ - def __init__(self, items=None, isfooter=False, client=None): - self.items = items - if isfooter: - locinfo = 'footer showFooter defaultFooter footerSeparator' - else: - locinfo = 'header showHeader defaultHeader headerSeparator' - self.isfooter = isfooter - self.loc, self.showloc, self.defaultloc, self.addsep = locinfo.split() - self.totalpages = 0 - self.client = client - - def prepare(self, pageobj, canv, doc): - showloc = pageobj.template.get(self.showloc, True) - height = 0 - items = self.items - if showloc: - if not items: - items = pageobj.template.get(self.defaultloc) - if items: - items = self.client.gen_elements(publish_secondary_doctree(items, self.client.doctree, None)) - if items: - if isinstance(items, list): - items = items[:] - else: - items = [Paragraph(items, pageobj.styles[self.loc])] - addsep = pageobj.template.get(self.addsep, False) - if addsep: - if self.isfooter: - items.insert(0, Separation()) - else: - items.append(Separation()) - _, height = _listWrapOn(items, pageobj.tw, canv) - self.prepared = height and items - return height - - def replaceTokens(self, elems, canv, doc, smarty): - """Put doc_title/page number/etc in text of header/footer.""" - - # Make sure page counter is up to date - pnum=setPageCounter() - - def replace(text): - if not isinstance(text, unicode): - try: - text = unicode(text, e.encoding) - except AttributeError: - text = unicode(text, 'utf-8') - except TypeError: - text = unicode(text, 'utf-8') - - text = text.replace(u'###Page###', pnum) - if '###Total###' in text: - text = text.replace(u'###Total###', str(self.totalpages)) - self.client.mustMultiBuild=True - text = text.replace(u"###Title###", doc.title) - text = text.replace(u"###Section###", - getattr(canv, 'sectName', '')) - text = text.replace(u"###SectNum###", - getattr(canv, 'sectNum', '')) - text = 
smartyPants(text, smarty) - return text - - for i,e in enumerate(elems): - # TODO: implement a search/replace for arbitrary things - if isinstance(e, Paragraph): - text = replace(e.text) - elems[i] = Paragraph(text, e.style) - elif isinstance(e, DelayedTable): - data=deepcopy(e.data) - for r,row in enumerate(data): - for c,cell in enumerate(row): - if isinstance (cell, list): - data[r][c]=self.replaceTokens(cell, canv, doc, smarty) - else: - row[r]=self.replaceTokens([cell,], canv, doc, smarty)[0] - elems[i]=DelayedTable(data, e._colWidths, e.style) - - elif isinstance(e, BoundByWidth): - for index, item in enumerate(e.content): - if isinstance(item, Paragraph): - e.content[index] = Paragraph(replace(item.text), item.style) - elems[i] = e - - elif isinstance(e, OddEven): - odd=self.replaceTokens([e.odd,], canv, doc, smarty)[0] - even=self.replaceTokens([e.even,], canv, doc, smarty)[0] - elems[i]=OddEven(odd, even) - return elems - - def draw(self, pageobj, canv, doc, x, y, width, height): - self.totalpages = max(self.totalpages, doc.page) - items = self.prepared - if items: - self.replaceTokens(items, canv, doc, pageobj.smarty) - container = MyContainer() - container._content = items - container.width = width - container.height = height - container.drawOn(canv, x, y) - - -class FancyPage(PageTemplate): - """ A page template that handles changing layouts. - """ - - def __init__(self, _id, _head, _foot, client): - self.client = client - self.styles = client.styles - self._head = HeaderOrFooter(_head, client=client) - self._foot = HeaderOrFooter(_foot, True, client) - self.smarty = client.smarty - self.show_frame = client.show_frame - self.image_cache = {} - PageTemplate.__init__(self, _id, []) - - - def draw_background(self, which, canv): - ''' Draws a background and/or foreground image - on each page which uses the template. - - Calculates the image one time, and caches - it for reuse on every page in the template. - - How the background is drawn depends on the - --fit-background-mode option. - - If desired, we could add code to push it around - on the page, using stylesheets to align and/or - set the offset. - ''' - uri=self.template[which] - info = self.image_cache.get(uri) - if info is None: - fname, _, _ = MyImage.split_uri(uri) - if not os.path.exists(fname): - del self.template[which] - log.error("Missing %s image file: %s", which, uri) - return - try: - w, h, kind = MyImage.size_for_node(dict(uri=uri, ), self.client) - except ValueError: - # Broken image, return arbitrary stuff - uri=missing - w, h, kind = 100, 100, 'direct' - - pw, ph = self.styles.pw, self.styles.ph - if self.client.background_fit_mode == 'center': - scale = min(1.0, 1.0 * pw / w, 1.0 * ph / h) - sw, sh = w * scale, h * scale - x, y = (pw - sw) / 2.0, (ph - sh) / 2.0 - elif self.client.background_fit_mode == 'scale': - x, y = 0, 0 - sw, sh = pw, ph - else: - log.error('Unknown background fit mode: %s'% self.client.background_fit_mode) - # Do scale anyway - x, y = 0, 0 - sw, sh = pw, ph - - bg = MyImage(uri, sw, sh, client=self.client) - self.image_cache[uri] = info = bg, x, y - bg, x, y = info - bg.drawOn(canv, x, y) - - def is_left(self, page_num): - """Default behavior is that the first page is on the left. - - If the user has --first_page_on_right, the calculation is reversed. - """ - val = page_num % 2 == 1 - if self.client.first_page_on_right: - val = not val - return val - - - def beforeDrawPage(self, canv, doc): - """Do adjustments to the page according to where we are in the document. 
- - * Gutter margins on left or right as needed - - """ - - global _counter, _counterStyle - - styles = self.styles - self.tw = styles.pw - styles.lm - styles.rm - styles.gm - # What page template to use? - tname = canv.__dict__.get('templateName', - self.styles.firstTemplate) - self.template = self.styles.pageTemplates[tname] - canv.templateName=tname - - doct = getattr(canv, '_doctemplate', None) - canv._doctemplate = None # to make _listWrapOn work - - if doc.page == 1: - _counter = 0 - _counterStyle = 'arabic' - _counter += 1 - - # Adjust text space accounting for header/footer - - self.hh = self._head.prepare(self, canv, doc) - self.fh = self._foot.prepare(self, canv, doc) - - canv._doctemplate = doct - - self.hx = styles.lm - self.hy = styles.ph - styles.tm - self.hh - - self.fx = styles.lm - self.fy = styles.bm - self.th = styles.ph - styles.tm - styles.bm - self.hh \ - - self.fh - styles.ts - styles.bs - - # Adjust gutter margins - if self.is_left(doc.page): # Left page - x1 = styles.lm - else: # Right page - x1 = styles.lm + styles.gm - y1 = styles.bm + self.fh + styles.bs - - # If there is a background parameter for this page Template, draw it - if 'background' in self.template: - self.draw_background('background', canv) - - self.frames = [] - for frame in self.template['frames']: - self.frames.append(SmartFrame(self, - styles.adjustUnits(frame[0], self.tw) + x1, - styles.adjustUnits(frame[1], self.th) + y1, - styles.adjustUnits(frame[2], self.tw), - styles.adjustUnits(frame[3], self.th), - showBoundary=self.show_frame)) - canv.firstSect = True - canv._pagenum = doc.page - for frame in self.frames: - frame._pagenum=doc.page - - def afterDrawPage(self, canv, doc): - """Draw header/footer.""" - # Adjust for gutter margin - canv.addPageLabel(canv._pageNumber-1,numberingstyles[_counterStyle],_counter) - - log.info('Page %s [%s]'%(_counter,doc.page)) - if self.is_left(doc.page): # Left page - hx = self.hx - fx = self.fx - else: # Right Page - hx = self.hx + self.styles.gm - fx = self.fx + self.styles.gm - - self._head.draw(self, canv, doc, hx, self.hy, self.tw, self.hh) - self._foot.draw(self, canv, doc, fx, self.fy, self.tw, self.fh) - - # If there is a foreground parameter for this page Template, draw it - if 'foreground' in self.template: - self.draw_background('foreground', canv) - - -def parse_commandline(): - - parser = OptionParser() - - parser.add_option('--config', dest='configfile', metavar='FILE', - help='Config file to use. Default=~/.rst2pdf/config') - - parser.add_option('-o', '--output', dest='output', metavar='FILE', - help='Write the PDF to FILE') - - def_ssheets = ','.join([expanduser(p) for p in - config.getValue("general", "stylesheets", "").split(',')]) - parser.add_option('-s', '--stylesheets', dest='style', - type='string', action='append', - metavar='STYLESHEETS', default=[def_ssheets], - help='A comma-separated list of custom stylesheets. Default="%s"' - % def_ssheets) - - def_sheetpath = os.pathsep.join([expanduser(p) for p in - config.getValue("general", "stylesheet_path", "").split(os.pathsep)]) - parser.add_option('--stylesheet-path', dest='stylepath', - metavar='FOLDER%sFOLDER%s...%sFOLDER'%((os.pathsep, )*3), - default=def_sheetpath, - help='A list of folders to search for stylesheets,' - ' separated using "%s". 
Default="%s"' %(os.pathsep, def_sheetpath)) - - def_compressed = config.getValue("general", "compressed", False) - parser.add_option('-c', '--compressed', dest='compressed', - action="store_true", default=def_compressed, - help='Create a compressed PDF. Default=%s'%def_compressed) - - parser.add_option('--print-stylesheet', dest='printssheet', - action="store_true", default=False, - help='Print the default stylesheet and exit') - - parser.add_option('--font-folder', dest='ffolder', metavar='FOLDER', - help='Search this folder for fonts. (Deprecated)') - - def_fontpath = os.pathsep.join([expanduser(p) for p in - config.getValue("general", "font_path", "").split(os.pathsep)]) - parser.add_option('--font-path', dest='fpath', - metavar='FOLDER%sFOLDER%s...%sFOLDER'%((os.pathsep, )*3), - default=def_fontpath, - help='A list of folders to search for fonts, separated using "%s".' - ' Default="%s"' % (os.pathsep, def_fontpath)) - - def_baseurl = urlunparse(['file',os.getcwd()+os.sep,'','','','']) - parser.add_option('--baseurl', dest='baseurl', metavar='URL', - default=def_baseurl, - help='The base URL for relative URLs. Default="%s"'%def_baseurl) - - def_lang = config.getValue("general", "language", 'en_US') - parser.add_option('-l', '--language', metavar='LANG', - default=def_lang, dest='language', - help='Language to be used for hyphenation' - ' and docutils localizations. Default="%s"' % def_lang) - - def_header = config.getValue("general", "header") - parser.add_option('--header', metavar='HEADER', - default=def_header, dest='header', - help='Page header if not specified in the document.' - ' Default="%s"' % def_header) - - def_footer = config.getValue("general", "footer") - parser.add_option('--footer', metavar='FOOTER', - default=def_footer, dest='footer', - help='Page footer if not specified in the document.' - ' Default="%s"' % def_footer) - - def_section_header_depth = config.getValue("general","section_header_depth",2) - parser.add_option('--section-header-depth', metavar='N', - default=def_section_header_depth, dest='section_header_depth', - help = '''Sections up to this depth will be used in the header and footer's replacement of ###Section###. Default=%s''' % def_section_header_depth) - - def_smartquotes = config.getValue("general", "smartquotes", "0") - parser.add_option("--smart-quotes", metavar="VALUE", - default=def_smartquotes, dest="smarty", - help='Try to convert ASCII quotes, ellipses and dashes' - ' to the typographically correct equivalent. For details,' - ' read the man page or the manual. Default="%s"' % def_smartquotes) - - def_fit = config.getValue("general", "fit_mode", "shrink") - parser.add_option('--fit-literal-mode', metavar='MODE', - default=def_fit, dest='fit_mode', - help='What to do when a literal is too wide. One of error,' - ' overflow,shrink,truncate. Default="%s"' % def_fit) - - def_fit_background = config.getValue("general", "background_fit_mode", - "center") - parser.add_option('--fit-background-mode', metavar='MODE', - default=def_fit_background, dest='background_fit_mode', - help='How to fit the background image to the page.' - ' One of scale or center. 
Default="%s"' % def_fit_background) - - parser.add_option('--inline-links', action="store_true", - dest='inlinelinks', default=False, - help='Shows target between parentheses instead of active link.') - - parser.add_option('--repeat-table-rows', action="store_true", - dest='repeattablerows', default=False, - help='Repeats header row for each split table.') - - def_raw_html = config.getValue("general", "raw_html", False) - parser.add_option('--raw-html', action="store_true", - dest='raw_html', default=def_raw_html, - help='Support embeddig raw HTML. Default=%s' % def_raw_html) - - parser.add_option('-q', '--quiet', action="store_true", - dest='quiet', default=False, - help='Print less information.') - - parser.add_option('-v', '--verbose', action="store_true", - dest='verbose', default=False, - help='Print debug information.') - - parser.add_option('--very-verbose', action="store_true", - dest='vverbose', default=False, - help='Print even more debug information.') - - parser.add_option('--version', action="store_true", - dest='version', default=False, - help='Print version number and exit.') - - def_footnote_backlinks = config.getValue("general", - "footnote_backlinks", True) - parser.add_option('--no-footnote-backlinks', action='store_false', - dest='footnote_backlinks', default=def_footnote_backlinks, - help='Disable footnote backlinks.' - ' Default=%s' % str(not def_footnote_backlinks)) - - def_inline_footnotes = config.getValue("general", - "inline_footnotes", False) - parser.add_option('--inline-footnotes', action='store_true', - dest='inline_footnotes', default=def_inline_footnotes, - help='Show footnotes inline.' - ' Default=%s' % str(not def_inline_footnotes)) - - def_real_footnotes = config.getValue("general", - "real_footnotes", False) - parser.add_option('--real-footnotes', action='store_true', - dest='real_footnotes', default=def_real_footnotes, - help='Show footnotes at the bottom of the page where they are defined.' - ' Default=%s' % str(def_real_footnotes)) - - def_dpi = config.getValue("general", "default_dpi", 300) - parser.add_option('--default-dpi', dest='def_dpi', metavar='NUMBER', - default=def_dpi, - help='DPI for objects sized in pixels. Default=%d'%def_dpi) - - parser.add_option('--show-frame-boundary', dest='show_frame', - action='store_true', default=False, - help='Show frame borders (only useful for debugging). Default=False') - - parser.add_option('--disable-splittables', dest='splittables', - action='store_false', default=True, - help="Don't use splittable flowables in some elements." - " Only try this if you can't process a document any other way.") - - def_break = config.getValue("general", "break_level", 0) - parser.add_option('-b', '--break-level', dest='breaklevel', - metavar='LEVEL', default=def_break, - help='Maximum section level that starts in a new page.' 
- ' Default: %d' % def_break) - - def_blankfirst = config.getValue("general", "blank_first_page", False) - parser.add_option('--blank-first-page', dest='blank_first_page', - action='store_true', default=def_blankfirst, - help='Add a blank page at the beginning of the document.') - - def_first_page_on_right = config.getValue("general", "first_page_on_right", False) - parser.add_option('--first-page-on-right', dest='first_page_on_right', - action='store_true', default=def_first_page_on_right, - help='Two-sided book style (where first page starts on the right side)') - - def_breakside = config.getValue("general", "break_side", 'any') - parser.add_option('--break-side', dest='breakside', metavar='VALUE', - default=def_breakside, - help='How section breaks work. Can be "even", and sections start' - ' in an even page, "odd", and sections start in odd pages,' - ' or "any" and sections start in the next page, be it even or odd.' - ' See also the -b option.') - - parser.add_option('--date-invariant', dest='invariant', - action='store_true', default=False, - help="Don't store the current date in the PDF." - " Useful mainly for the test suite," - " where we don't want the PDFs to change.") - - parser.add_option('-e', '--extension-module', dest='extensions', action="append", type="string", - default = ['vectorpdf'], - help="Add a helper extension module to this invocation of rst2pdf " - "(module must end in .py and be on the python path)") - - def_cover = config.getValue("general", "custom_cover", 'cover.tmpl') - parser.add_option('--custom-cover', dest='custom_cover', - metavar='FILE', default= def_cover, - help='Template file used for the cover page. Default: %s'%def_cover) - - def_floating_images = config.getValue("general", "floating_images", False) - parser.add_option('--use-floating-images', action='store_true', default=def_floating_images, - help='Makes images with :align: attribute work more like in rst2html. Default: %s'%def_floating_images, - dest='floating_images') - - def_numbered_links = config.getValue("general", "numbered_links", False) - parser.add_option('--use-numbered-links', action='store_true', default=def_numbered_links, - help='When using numbered sections, adds the numbers to all links referring to the section headers. Default: %s'%def_numbered_links, - dest='numbered_links') - - parser.add_option('--strip-elements-with-class', action='append', dest='strip_elements_with_classes', - metavar='CLASS', help='Remove elements with this CLASS from the output. 
Can be used multiple times.') - - return parser - -def main(_args=None): - """Parse command line and call createPdf with the correct data.""" - - parser = parse_commandline() - # Fix issue 430: don't overwrite args - # need to parse_args to see i we have a custom config file - options, args = parser.parse_args(copy(_args)) - - if options.configfile: - # If there is a config file, we need to reparse - # the command line because we have different defaults - config.parseConfig(options.configfile) - parser = parse_commandline() - options, args = parser.parse_args(copy(_args)) - - if options.version: - from rst2pdf import version - print version - sys.exit(0) - - if options.quiet: - log.setLevel(logging.CRITICAL) - - if options.verbose: - log.setLevel(logging.INFO) - - if options.vverbose: - log.setLevel(logging.DEBUG) - - if options.printssheet: - # find base path - if hasattr(sys, 'frozen'): - PATH = abspath(dirname(sys.executable)) - else: - PATH = abspath(dirname(__file__)) - print open(join(PATH, 'styles', 'styles.style')).read() - sys.exit(0) - - filename = False - - if len(args) == 0: - args = [ '-', ] - elif len(args) > 2: - log.critical('Usage: %s [ file.txt [ file.pdf ] ]', sys.argv[0]) - sys.exit(1) - elif len(args) == 2: - if options.output: - log.critical('You may not give both "-o/--output" and second argument') - sys.exit(1) - options.output = args.pop() - - if args[0] == '-': - infile = sys.stdin - options.basedir=os.getcwd() - elif len(args) > 1: - log.critical('Usage: %s file.txt [ -o file.pdf ]', sys.argv[0]) - sys.exit(1) - else: - filename = args[0] - options.basedir=os.path.dirname(os.path.abspath(filename)) - try: - infile = open(filename) - except IOError, e: - log.error(e) - sys.exit(1) - options.infile = infile - - if options.output: - outfile = options.output - if outfile == '-': - outfile = sys.stdout - options.compressed = False - #we must stay quiet - log.setLevel(logging.CRITICAL) - else: - if filename: - if filename.endswith('.txt') or filename.endswith('.rst'): - outfile = filename[:-4] + '.pdf' - else: - outfile = filename + '.pdf' - else: - outfile = sys.stdout - options.compressed = False - #we must stay quiet - log.setLevel(logging.CRITICAL) - #/reportlab/pdfbase/pdfdoc.py output can - #be a callable (stringio, stdout ...) - options.outfile = outfile - - ssheet = [] - if options.style: - for l in options.style: - ssheet += l.split(',') - else: - ssheet = [] - options.style = [x for x in ssheet if x] - - fpath = [] - if options.fpath: - fpath = options.fpath.split(os.pathsep) - if options.ffolder: - fpath.append(options.ffolder) - options.fpath = fpath - - spath = [] - if options.stylepath: - spath = options.stylepath.split(os.pathsep) - options.stylepath = spath - - if options.real_footnotes: - options.inline_footnotes = True - - if reportlab.Version < '2.3': - log.warning('You are using Reportlab version %s.' 
- ' The suggested version is 2.3 or higher' % reportlab.Version) - - if options.invariant: - patch_PDFDate() - patch_digester() - - add_extensions(options) - - RstToPdf( - stylesheets=options.style, - language=options.language, - header=options.header, footer=options.footer, - inlinelinks=options.inlinelinks, - breaklevel=int(options.breaklevel), - baseurl=options.baseurl, - fit_mode=options.fit_mode, - background_fit_mode = options.background_fit_mode, - smarty=str(options.smarty), - font_path=options.fpath, - style_path=options.stylepath, - repeat_table_rows=options.repeattablerows, - footnote_backlinks=options.footnote_backlinks, - inline_footnotes=options.inline_footnotes, - real_footnotes=options.real_footnotes, - def_dpi=int(options.def_dpi), - basedir=options.basedir, - show_frame=options.show_frame, - splittables=options.splittables, - blank_first_page=options.blank_first_page, - first_page_on_right=options.first_page_on_right, - breakside=options.breakside, - custom_cover=options.custom_cover, - floating_images=options.floating_images, - numbered_links=options.numbered_links, - raw_html=options.raw_html, - section_header_depth=int(options.section_header_depth), - strip_elements_with_classes=options.strip_elements_with_classes, - ).createPdf(text=options.infile.read(), - source_path=options.infile.name, - output=options.outfile, - compressed=options.compressed) - -# Ugly hack that fixes Issue 335 -reportlab.lib.utils.ImageReader.__deepcopy__ = lambda self,*x: copy(self) - -def patch_digester(): - ''' Patch digester so that we can get the same results when image -filenames change''' - import reportlab.pdfgen.canvas as canvas - - cache = {} - - def _digester(s): - index = cache.setdefault(s, len(cache)) - return 'rst2pdf_image_%s' % index - canvas._digester = _digester - -def patch_PDFDate(): - '''Patch reportlab.pdfdoc.PDFDate so the invariant dates work correctly''' - from reportlab.pdfbase import pdfdoc - import reportlab - class PDFDate: - __PDFObject__ = True - # gmt offset now suppported - def __init__(self, invariant=True, dateFormatter=None): - now = (2000,01,01,00,00,00,0) - self.date = now[:6] - self.dateFormatter = dateFormatter - - def format(self, doc): - from time import timezone - dhh, dmm = timezone // 3600, (timezone % 3600) % 60 - dfmt = self.dateFormatter or ( - lambda yyyy,mm,dd,hh,m,s: - "D:%04d%02d%02d%02d%02d%02d%+03d'%02d'" % (yyyy,mm,dd,hh,m,s,0,0)) - return pdfdoc.format(pdfdoc.PDFString(dfmt(*self.date)), doc) - - pdfdoc.PDFDate = PDFDate - reportlab.rl_config.invariant = 1 - -def add_extensions(options): - - extensions = [] - for ext in options.extensions: - if not ext.startswith('!'): - extensions.append(ext) - continue - ext = ext[1:] - try: - extensions.remove(ext) - except ValueError: - log.warning('Could not remove extension %s -- no such extension installed' % ext) - else: - log.info('Removed extension %s' % ext) - - options.extensions[:] = extensions - if not extensions: - return - - class ModuleProxy(object): - def __init__(self): - self.__dict__ = globals() - - createpdf = ModuleProxy() - for modname in options.extensions: - prefix, modname = os.path.split(modname) - path_given = prefix - if modname.endswith('.py'): - modname = modname[:-3] - path_given = True - if not prefix: - prefix = os.path.join(os.path.dirname(__file__), 'extensions') - if prefix not in sys.path: - sys.path.append(prefix) - prefix = os.getcwd() - if prefix not in sys.path: - sys.path.insert(0, prefix) - log.info('Importing extension module %s', repr(modname)) - firstname = 
path_given and modname or (modname + '_r2p') - try: - try: - module = __import__(firstname, globals(), locals()) - except ImportError, e: - if firstname != str(e).split()[-1]: - raise - module = __import__(modname, globals(), locals()) - except ImportError, e: - if str(e).split()[-1] not in [firstname, modname]: - raise - raise SystemExit('\nError: Could not find module %s ' - 'in sys.path [\n %s\n]\nExiting...\n' % - (modname, ',\n '.join(sys.path))) - if hasattr(module, 'install'): - module.install(createpdf, options) - -def monkeypatch(): - ''' For initial test purposes, make reportlab 2.4 mostly perform like 2.3. - This allows us to compare PDFs more easily. - - There are two sets of changes here: - - 1) rl_config.paraFontSizeHeightOffset = False - - This reverts a change reportlab that messes up a lot of docs. - We may want to keep this one in here, or at least figure out - the right thing to do. If we do NOT keep this one here, - we will have documents look different in RL2.3 than they do - in RL2.4. This is probably unacceptable. - - 2) Everything else (below the paraFontSizeHeightOffset line): - - These change some behavior in reportlab that affects the - graphics content stream without affecting the actual output. - - We can remove these changes after making sure we are happy - and the checksums are good. - ''' - import reportlab - from reportlab import rl_config - from reportlab.pdfgen.canvas import Canvas - from reportlab.pdfbase import pdfdoc - - if getattr(reportlab, 'Version', None) != '2.4': - return - - # NOTE: THIS IS A REAL DIFFERENCE -- DEFAULT y-offset FOR CHARS CHANGES!!! - rl_config.paraFontSizeHeightOffset = False - - # Fix the preamble. 2.4 winds up injecting an extra space, so we toast it. - - def new_make_preamble(self): - self._old_make_preamble() - self._preamble = ' '.join(self._preamble.split()) - - Canvas._old_make_preamble = Canvas._make_preamble - Canvas._make_preamble = new_make_preamble - - # A new optimization removes the CR/LF between 'endstream' and 'endobj' - # Remove it for comparison - pdfdoc.INDIRECTOBFMT = pdfdoc.INDIRECTOBFMT.replace('CLINEEND', 'LINEEND') - - # By default, transparency is set, and by default, that changes PDF version - # to 1.4 in RL 2.4. - pdfdoc.PDF_SUPPORT_VERSION['transparency'] = 1,3 - -monkeypatch() - -def publish_secondary_doctree(text, main_tree, source_path): - - # This is a hack so the text substitutions defined - # in the document are available when we process the cover - # page. See Issue 322 - dt = main_tree - # Add substitutions from the main doctree - class addSubsts(Transform): - default_priority = 219 - - def apply(self): - self.document.substitution_defs.update(dt.substitution_defs) - self.document.substitution_names.update(dt.substitution_names) - - # Use an own reader to modify transformations done. - class Reader(standalone.Reader): - - def get_transforms(self): - default = standalone.Reader.get_transforms(self) - return (default + [ addSubsts, ]) - - # End of Issue 322 hack - - return docutils.core.publish_doctree(text, - reader = Reader(), source_path=source_path) - - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/dist-packages/rst2pdf/rst2pdf/dumpstyle.py b/dist-packages/rst2pdf/rst2pdf/dumpstyle.py deleted file mode 100755 index 7593c44ca..000000000 --- a/dist-packages/rst2pdf/rst2pdf/dumpstyle.py +++ /dev/null @@ -1,161 +0,0 @@ -#!/usr/bin/env python -''' - Call dumps() to dump a stylesheet to a string. 
- - Or run the script to dump all .json in the styles directory - to .style in the styles directory. -''' - -import sys -import os - -from rson import loads as rloads -from json import loads as jloads - -def dumps(obj, forcestyledict=True): - ''' If forcestyledict is True, will attempt to - turn styles into a dictionary. - ''' - - def dofloat(result, obj, indent): - s = '%.3f' % obj - while '.' in s and s.endswith('0'): - s = s[:-1] - result.append(s) - - def doint(result, obj, indent): - if isinstance(obj, bool): - obj = repr(obj).lower() - result.append(str(obj)) - - badch = set('[]{}:=,"\n') - - def dostr(result, obj, indent): - try: - float(obj) - except: - ok = True - else: - ok = obj == obj.strip() - ok = ok and not set(obj) & badch - if ok: - result.append(obj) - return - - obj = obj.replace('\\', '\\\\').replace('\n', '\\n') - obj = obj.replace('"', '\\"') - result.append('"%s"' % obj) - - def dolist(result, obj, indent): - if indent: - if not obj: - result.append('[]') - return - elif isinstance(obj[0], list): - result.append('[]') - indent += ' ' - for item in obj: - dumprecurse(result, item, indent) - return - result.append('[') - obj = [[x, ', '] for x in obj] - obj[-1][-1] = ']' - for item, separator in obj: - dumprecurse(result, item, '') - result.append(separator) - - def dodict(result, obj, indent): - if not obj: - result.append('{}') - return - obj = sorted(obj.iteritems()) - multiline = indent and ( len(obj) > 2 or - len(obj) == 2 and ( - isinstance(obj[0][-1], (list, dict)) or - isinstance(obj[-1][-1], (list, dict)))) - if not multiline and (not indent or len(obj) != 1): - result.append('{') - obj = [[x, ', '] for x in obj] - obj[-1][-1] = '}' - for (key, value), separator in obj: - dumprecurse(result, key, '') - result.append(': ') - dumprecurse(result, value, '') - result.append(separator) - return - doindent = len(obj) > 1 - for key, value in obj: - dumprecurse(result, key, indent, doindent) - result.append(': ') - dumprecurse(result, value, indent + ' ', False) - - def donone(result, obj, indent): - result.append('null') - - dumpfuncs = {float: dofloat, int: doint, basestring: dostr, - list: dolist, dict: dodict, type(None): donone} - - dumpfuncs = dumpfuncs.items() - - def dumprecurse(result, obj, indent='\n', indentnow=True): - if indentnow: - result.append(indent) - for otype, ofunc in dumpfuncs: - if isinstance(obj, otype): - return ofunc(result, obj, indent) - raise ValueError(repr(obj)) - - result = [] - if forcestyledict: - obj = fixstyle(obj) - dumprecurse(result, obj, indentnow=False) - return fixspacing(''.join(result)) - -def fixspacing(s): - ''' Try to make the output prettier by inserting blank lines - in random places. 
- ''' - result = [] - indent = -1 - for line in s.splitlines(): - line = line.rstrip() # Some lines had ' ' - if not line: - continue - indent, previndent = len(line) - len(line.lstrip()), indent - if indent <= previndent and indent < 8: - if indent < previndent or not indent: - result.append('') - result.append(line) - result.append('') - return '\n'.join(result) - -def fixstyle(obj): - ''' Try to convert styles into a dictionary - ''' - if obj: - if isinstance(obj, list): - lengths = [len(x) for x in obj] - if min(lengths) == max(lengths) == 2: - obj = dict(obj) - elif isinstance(obj, dict) and 'styles' in obj: - obj['styles'] = dict(obj['styles']) - return obj - -def convert(srcname): - ''' Convert a single file from .json to .style - ''' - print srcname - sstr = open(srcname, 'rb').read() - sdata = fixstyle(jloads(sstr)) - dstr = dumps(sdata) - assert sdata == rloads(dstr), "Bad round-trip" - - dstname = srcname.replace('.json', '.style') - dstf = open(dstname, 'wb') - dstf.write(dstr) - dstf.close() - - -if __name__ == '__main__': - for fname in [os.path.join('styles', x) for x in os.listdir('styles') if x.endswith('.json')]: - convert(fname) diff --git a/dist-packages/rst2pdf/rst2pdf/extensions/__init__.py b/dist-packages/rst2pdf/rst2pdf/extensions/__init__.py deleted file mode 100644 index a33e74bef..000000000 --- a/dist-packages/rst2pdf/rst2pdf/extensions/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -''' -This place-holder module makes the extensions directory -into a Python "package", so that external user-specific -modules can act as umbrella modules, and, for example: - -import rst2pdf.extensions.vectorpdf_r2p - -to bring in the PDF extension. -''' diff --git a/dist-packages/rst2pdf/rst2pdf/extensions/dotted_toc.py b/dist-packages/rst2pdf/rst2pdf/extensions/dotted_toc.py deleted file mode 100644 index 50dc78a68..000000000 --- a/dist-packages/rst2pdf/rst2pdf/extensions/dotted_toc.py +++ /dev/null @@ -1,152 +0,0 @@ -# -*- coding: utf-8 -*- - -# See LICENSE.txt for licensing terms -#$HeadURL$ -#$LastChangedDate$ -#$LastChangedRevision$ - -# Some fragments of code are copied from Reportlab under this license: -# -##################################################################################### -# -# Copyright (c) 2000-2008, ReportLab Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# * Neither the name of the company nor the names of its contributors may be -# used to endorse or promote products derived from this software without -# specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
-# IN NO EVENT SHALL THE OFFICERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; -# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER -# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -##################################################################################### - -from copy import copy -import reportlab -from reportlab.platypus.tableofcontents import drawPageNumbers -import rst2pdf.genelements as genelements - -Table = genelements.Table -Paragraph = genelements.Paragraph - -''' - -.. NOTE: - - THIS IS A HUGE HACK HACK HACK - -All I did was take the wrap() method from the stock reportlab TOC generator, -and make the minimal changes to make it work on MY documents in rst2pdf. - -History: -~~~~~~~~ - -The reportlab TOC generator adds nice dots between the text and the page number. -The rst2pdf one does not. - -A closer examination reveals that the rst2pdf one probably deliberately stripped -this code, because the reportlab implementation only allowed a single TOC, and -this is unacceptable for at least some rst2pdf users. - -There are other differences in the rst2pdf one I don't understand. This module -is a hack to add back dots between the lines. Maybe at some point we can figure -out if this is right, or how to support dots in the TOC in the main code. - -Mind you, the original RL implementation is a complete hack in any case: - -- It uses a callback to a nested function which doesn't even bother to - assume the original enclosing scope is available at callback time. - This leads it to do crazy things like eval() - -- It uses a single name in the canvas for the callback function - (this is what kills multiple TOC capability) when it would be - extremely easy to generate a unique name. -''' - -class DottedTableOfContents(genelements.MyTableOfContents): - - toc_counter = [0] - - def wrap(self, availWidth, availHeight): - "All table properties should be known by now." - - # makes an internal table which does all the work. - # we draw the LAST RUN's entries! If there are - # none, we make some dummy data to keep the table - # from complaining - if len(self._lastEntries) == 0: - if reportlab.Version <= '2.3': - _tempEntries = [(0, 'Placeholder for table of contents', 0)] - else: - _tempEntries = [(0, 'Placeholder for table of contents', - 0, None)] - else: - _tempEntries = self._lastEntries - - if _tempEntries: - base_level = _tempEntries[0][0] - else: - base_level = 0 - - def drawTOCEntryEnd(canvas, kind, label): - '''Callback to draw dots and page numbers after each entry.''' - - style, page, key, dot = end_info[int(label)] - drawPageNumbers(canvas, style, [(page, key)], availWidth, availHeight, dot) - - toc_counter = self.toc_counter - toc_counter[0] += 1 - funcname = 'drawTOCEntryEnd%s' % toc_counter[0] - setattr(self.canv, funcname, drawTOCEntryEnd) - - end_info = [] - tableData = [] - for entry in _tempEntries: - level, text, pageNum = entry[:3] - left_col_level = level - base_level - if reportlab.Version > '2.3': # For ReportLab post-2.3 - style=self.getLevelStyle(left_col_level) - else: # For ReportLab <= 2.3 - style = self.levelStyles[left_col_level] - - if self.dotsMinLevel >= 0 and left_col_level >= self.dotsMinLevel: - dot = ' . 
' - else: - dot = '' - - style = copy(style) - style.textColor = self.linkColor - key = self.refid_lut.get((level, text, pageNum), None) - if key: - if not isinstance(text, unicode): - text = unicode(text, 'utf-8') - text = u'%s' % (key, text) - - para = Paragraph('%s' % (text, funcname, len(end_info)), style) - end_info.append((style, pageNum, key, dot)) - if style.spaceBefore: - tableData.append([Spacer(1, style.spaceBefore),]) - tableData.append([para,]) - - self._table = Table(tableData, colWidths=(availWidth,), style=self.tableStyle) - - self.width, self.height = self._table.wrapOn(self.canv,availWidth, availHeight) - return (self.width, self.height) - -genelements.MyTableOfContents = DottedTableOfContents diff --git a/dist-packages/rst2pdf/rst2pdf/extensions/fancytitles.py b/dist-packages/rst2pdf/rst2pdf/extensions/fancytitles.py deleted file mode 100644 index 1a84cdd5f..000000000 --- a/dist-packages/rst2pdf/rst2pdf/extensions/fancytitles.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- -import rst2pdf.genelements as genelements -from rst2pdf.flowables import Heading, MyPageBreak -from rst2pdf.image import MyImage -import docutils -from rst2pdf.opt_imports import Paragraph -import reportlab -import tempfile -import re -from xml.sax.saxutils import unescape -import codecs - -class FancyTitleHandler(genelements.HandleParagraph, docutils.nodes.title): - ''' - This class will handle title nodes. - - It takes a "titletemplate.svg", replaces TITLEGOESHERE with - the actual title text, and draws that using the FancyHeading flowable - (see below). - - Since this class is defined in an extension, it - effectively replaces rst2pdf.genelements.HandleTitle. - ''' - - def gather_elements(self, client, node, style): - # This method is copied from the HandleTitle class - # in rst2pdf.genelements. - - # Special cases: (Not sure this is right ;-) - if isinstance(node.parent, docutils.nodes.document): - #node.elements = [Paragraph(client.gen_pdftext(node), - #client.styles['title'])] - # The visible output is now done by the cover template - node.elements = [] - client.doc_title = node.rawsource - client.doc_title_clean = node.astext().strip() - elif isinstance(node.parent, docutils.nodes.topic): - node.elements = [Paragraph(client.gen_pdftext(node), - client.styles['topic-title'])] - elif isinstance(node.parent, docutils.nodes.Admonition): - node.elements = [Paragraph(client.gen_pdftext(node), - client.styles['admonition-title'])] - elif isinstance(node.parent, docutils.nodes.table): - node.elements = [Paragraph(client.gen_pdftext(node), - client.styles['table-title'])] - elif isinstance(node.parent, docutils.nodes.sidebar): - node.elements = [Paragraph(client.gen_pdftext(node), - client.styles['sidebar-title'])] - else: - # Section/Subsection/etc. 
- text = client.gen_pdftext(node) - fch = node.children[0] - if isinstance(fch, docutils.nodes.generated) and \ - fch['classes'] == ['sectnum']: - snum = fch.astext() - else: - snum = None - key = node.get('refid') - maxdepth=4 - if reportlab.Version > '2.1': - maxdepth=6 - - # The parent ID is the refid + an ID to make it unique for Sphinx - parent_id=(node.parent.get('ids', [None]) or [None])[0]+u'-'+unicode(id(node)) - if client.depth > 1: - node.elements = [ Heading(text, - client.styles['heading%d'%min(client.depth, maxdepth)], - level=client.depth-1, - parent_id=parent_id, - node=node, - )] - else: # This is an important title, do our magic ;-) - # Hack the title template SVG - tfile = codecs.open('titletemplate.svg','r','utf-8') - tdata = tfile.read() - tfile.close() - tfile = tempfile.NamedTemporaryFile(dir='.', delete=False, suffix='.svg') - tfname = tfile.name - tfile.write(tdata.replace('TITLEGOESHERE', text).encode('utf-8')) - tfile.close() - - # Now tfname contains a SVG with the right title. - # Make rst2pdf delete it later. - client.to_unlink.append(tfname) - - e = FancyHeading(tfname, - width=700, - height=100, - client=client, - snum=snum, - parent_id=parent_id, - text=text, - hstyle=client.styles['heading%d'%min(client.depth, maxdepth)]) - - node.elements = [e] - - if client.depth <= client.breaklevel: - node.elements.insert(0, MyPageBreak(breakTo=client.breakside)) - return node.elements - -class FancyHeading(MyImage, Heading): - '''This is a cross between the Heading flowable, that adds outline - entries so you have a PDF TOC, and MyImage, that draws images''' - - def __init__(self, *args, **kwargs): - # The inicialization is taken from rst2pdf.flowables.Heading - hstyle = kwargs.pop('hstyle') - level = 0 - text = kwargs.pop('text') - self.snum = kwargs.pop('snum') - self.parent_id= kwargs.pop('parent_id') - #self.stext = - Heading.__init__(self,text,hstyle,level=level, - parent_id=self.parent_id) - # Cleanup title text - #self.stext = re.sub(r'<[^>]*?>', '', unescape(self.stext)) - #self.stext = self.stext.strip() - - # Stuff needed for the outline entry - MyImage.__init__(self, *args, **kwargs) - - def drawOn(self,canv,x,y,_sW): - - ## These two lines are magic. - #if isinstance(self.parent_id, tuple): - #self.parent_id=self.parent_id[0] - - # Add outline entry. This is copied from rst2pdf.flowables.heading - canv.bookmarkHorizontal(self.parent_id,0,y+self.image.height) - - if canv.firstSect: - canv.sectName = self.stext - canv.firstSect=False - if self.snum is not None: - canv.sectNum = self.snum - else: - canv.sectNum = "" - - canv.addOutlineEntry(self.stext.encode('utf-8','replace'), - self.parent_id.encode('utf-8','replace'), - int(self.level), False) - - # And let MyImage do all the drawing - MyImage.drawOn(self,canv,x,y,_sW) diff --git a/dist-packages/rst2pdf/rst2pdf/extensions/inkscape_r2p.py b/dist-packages/rst2pdf/rst2pdf/extensions/inkscape_r2p.py deleted file mode 100644 index 32118c8a9..000000000 --- a/dist-packages/rst2pdf/rst2pdf/extensions/inkscape_r2p.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms - -''' -inkscape.py is an rst2pdf extension (e.g. rst2pdf -e inkscape xxx xxxx) -which uses the inkscape program to convert an svg to a PDF, then uses -the vectorpdf code to process the PDF. - -.. NOTE:: - - The initial version is a proof of concept; uses subprocess in a naive way, - and doesn't check return from inkscape for errors. 
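The NOTE above points out that this proof of concept calls inkscape naively and never checks its exit status. A minimal standalone sketch of what that check could look like, reusing the same command-line flags the extension passes below (the file paths are placeholders, not from the source)::

    import subprocess

    cmd = ['inkscape', '/path/to/figure.svg', '-A', '/tmp/figure.pdf']
    returncode = subprocess.call(cmd)   # same naive invocation as the extension
    if returncode != 0:                 # the step the proof of concept skips
        raise RuntimeError('inkscape exited with status %d' % returncode)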
-''' - -import sys, os, tempfile, subprocess -from weakref import WeakKeyDictionary -from rst2pdf.log import log - -from vectorpdf_r2p import VectorPdf -import rst2pdf.image - - -if sys.platform.startswith('win'): - # note: this is the default "all users" install location, - # we might want to provide an option for this - progname = os.path.expandvars(r'$PROGRAMFILES\Inkscape\inkscape.exe') -else: - progname = 'inkscape' - -class InkscapeImage(VectorPdf): - - # The filecache allows us to only read a given PDF file once - # for every RstToPdf client object. This allows this module - # to usefully cache, while avoiding being the cause of a memory - # leak in a long-running process. - - source_filecache = WeakKeyDictionary() - - @classmethod - def available(self): - return True - - def __init__(self, filename, width=None, height=None, kind='direct', - mask=None, lazy=True, srcinfo=None): - client, uri = srcinfo - cache = self.source_filecache.setdefault(client, {}) - pdffname = cache.get(filename) - if pdffname is None: - tmpf, pdffname = tempfile.mkstemp(suffix='.pdf') - os.close(tmpf) - client.to_unlink.append(pdffname) - cache[filename] = pdffname - cmd = [progname, os.path.abspath(filename), '-A', pdffname] - try: - subprocess.call(cmd) - except OSError, e: - log.error("Failed to run command: %s", ' '.join(cmd)) - raise - self.load_xobj((client, pdffname)) - - pdfuri = uri.replace(filename, pdffname) - pdfsrc = client, pdfuri - VectorPdf.__init__(self, pdfuri, width, height, kind, mask, lazy, pdfsrc) - - @classmethod - def raster(self, filename, client): - """Returns a URI to a rasterized version of the image""" - cache = self.source_filecache.setdefault(client, {}) - pngfname = cache.get(filename+'_raster') - if pngfname is None: - tmpf, pngfname = tempfile.mkstemp(suffix='.png') - os.close(tmpf) - client.to_unlink.append(pngfname) - cache[filename+'_raster'] = pngfname - cmd = [progname, os.path.abspath(filename), '-e', pngfname, '-d', str(client.def_dpi)] - try: - subprocess.call(cmd) - return pngfname - except OSError, e: - log.error("Failed to run command: %s", ' '.join(cmd)) - raise - return None - - -def install(createpdf, options): - ''' Monkey-patch our class in to image as a replacement class for SVGImage. - ''' - rst2pdf.image.SVGImage = InkscapeImage diff --git a/dist-packages/rst2pdf/rst2pdf/extensions/plantuml.py b/dist-packages/rst2pdf/rst2pdf/extensions/plantuml.py deleted file mode 100644 index 3230fcf37..000000000 --- a/dist-packages/rst2pdf/rst2pdf/extensions/plantuml.py +++ /dev/null @@ -1,80 +0,0 @@ -''' -A rst2pdf extension to implement something similar to sphinx's plantuml extension -(see http://pypi.python.org/pypi/sphinxcontrib-plantuml) - -Therefore, stuff may be copied from that code. -Ergo: - - :copyright: Copyright 2010 by Yuya Nishihara . - :license: BSD, (he says see LICENSE but the file is not there ;-) - -''' - -import errno -from docutils import nodes -from docutils.parsers import rst -from docutils.parsers.rst import directives -import rst2pdf.genelements as genelements -from rst2pdf.image import MyImage -import tempfile -import subprocess - -class plantuml(nodes.General, nodes.Element): - pass - - -class UmlDirective(rst.Directive): - """Directive to insert PlantUML markup - - Example:: - - .. uml:: - :alt: Alice and Bob - - Alice -> Bob: Hello - Alice <- Bob: Hi - - - You can use a :format: option to change between SVG and PNG diagrams, however, - the SVG plantuml generates doesn't look very good to me. 
- """ - has_content = True - option_spec = { - 'alt': directives.unchanged, - 'format': directives.unchanged, - } - - def run(self): - node = plantuml() - node['uml'] = '\n'.join(self.content) - node['alt'] = self.options.get('alt', None) - node['format'] = self.options.get('format', 'png') - return [node] - - -class UMLHandler(genelements.NodeHandler, plantuml): - """Class to handle UML nodes""" - - def gather_elements(self, client, node, style): - # Create image calling plantuml - tfile = tempfile.NamedTemporaryFile(dir='.', delete=False, suffix='.'+node['format']) - args = 'plantuml -pipe -charset utf-8' - if node['format'].lower() == 'svg': - args+=' -tsvg' - client.to_unlink.append(tfile.name) - try: - p = subprocess.Popen(args.split(), stdout=tfile, - stdin=subprocess.PIPE, stderr=subprocess.PIPE) - except OSError, err: - if err.errno != errno.ENOENT: - raise - raise PlantUmlError('plantuml command %r cannot be run' - % self.builder.config.plantuml) - serr = p.communicate(node['uml'].encode('utf-8'))[1] - if p.returncode != 0: - raise PlantUmlError('error while running plantuml\n\n' + serr) - - # Add Image node with the right image - return [MyImage(tfile.name, client=client)] - -directives.register_directive("uml", UmlDirective) diff --git a/dist-packages/rst2pdf/rst2pdf/extensions/preprocess_r2p.py b/dist-packages/rst2pdf/rst2pdf/extensions/preprocess_r2p.py deleted file mode 100644 index 96901d97f..000000000 --- a/dist-packages/rst2pdf/rst2pdf/extensions/preprocess_r2p.py +++ /dev/null @@ -1,366 +0,0 @@ -# -*- coding: utf-8 -*- - -# An extension module for rst2pdf -# Copyright 2010, Patrick Maupin -# See LICENSE.txt for licensing terms - -''' -preprocess is a rst2pdf extension module (invoked by -e preprocess -on the rst2pdf command line. - -There is a testcase for this file at rst2pdf/tests/test_preprocess.txt - -This preprocesses the source text file before handing it to docutils. - -This module serves two purposes: - -1) It demonstrates the technique and can be a starting point for similar - user-written processing modules; and - -2) It provides a simplified syntax for documents which are targeted only - at rst2pdf, rather than docutils in general. - -The design goal of "base rst2pdf" is to be completely compatible with -docutils, such that a file which works as a PDF can also work as HTML, -etc. - -Unfortunately, base docutils is a slow-moving target, and does not -make this easy. For example, SVG images do not work properly with -the HTML backend unless you install a patch, and docutils has no -concept of page breaks or additional vertical space (other than -the
). - -So, while it would be nice to have documents that render perfectly -with any backend, this goal is hard to achieve for some documents, -and once you are restricted to a particular transformation type, -then you might as well have a slightly nicer syntax for your source -document. - ------------------------------------------------------------------ - -Preprocessor extensions: - -All current extensions except style occupy a single line in the -source file. - -``.. include::`` - - Processes the include file as well. An include file may - either be a restructured text file, OR may be an RSON or - JSON stylesheet. The determination is made by trying to - parse it as RSON. If it passes, it is a stylesheet; if not, - well, we'll let the docutils parser have its way with it. - -``.. page::`` - - Is translated into a raw PageBreak. - -``.. space::`` - - Is translated into a raw Spacer. If only one number given, is - used for vertical space. This is the canonical use case, since - horizontal space is ignored anyway! - -``.. style::`` - - Allows you to create in-line stylesheets. As with other - restructured text components, the stylesheet data must - be indented. Stylesheets are in RSON or JSON. - -``.. widths::`` - - creates a new table style (based on table or the first - non-numeric token) and creates a class using that style - specifically for the next table in the document. (Creates - a .. class::, so you must specify .. widths:: immediately - before the table it applies to. Allows you to set the - widths for the table, using percentages. - -``SingleWordAtLeftColumn`` - - If a single word at the left column is surrounded by - blank lines, the singleword style is automatically applied to - the word. This is a workaround for the broken interaction - between docutils subtitles and bibliographic metadata. (I - found that docutils was referencing my subtitles from inside - the TOC, and that seemed silly. Perhaps there is a better - workaround at a lower level in rst2pdf.) - ------------------------------------------------------------------ - -Preprocessor operation: - -The preprocessor generates a file that has the same name as the source -file, with .build_temp. embedded in the name, and then passes that -file to the restructured text parser. - -This file is left on the disk after operation, because any error -messages from docutils will refer to line numbers in it, rather than -in the original source, so debugging could be difficult if the -file were automatically removed. - -''' - -import os -import re - -from rst2pdf.rson import loads as rson_loads - -from rst2pdf.log import log - -class DummyFile(str): - ''' We could use stringio, but that's really overkill for what - we need here. - ''' - def read(self): - return self - -class Preprocess(object): - def __init__(self, sourcef, incfile=False, widthcount=0): - ''' Process a file and decorate the resultant Preprocess instance with - self.result (the preprocessed file) and self.styles (extracted stylesheet - information) for the caller. - ''' - self.widthcount = widthcount - - name = sourcef.name - source = sourcef.read().replace('\r\n', '\n').replace('\r', '\n') - - # Make the determination if an include file is a stylesheet or - # another restructured text file, and handle stylesheets appropriately. 
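The ``.. widths::`` directive described above ends up as integer percentages that sum to exactly 100; the rounding lives in handle_widths further down in this file. A rough standalone sketch of that normalization step, outside of rst2pdf::

    def normalize_widths(values):
        '''Turn raw column widths (e.g. [1, 2, 2]) into percentage strings.'''
        total = float(sum(values))
        pct = [int(round(100 * v / total)) for v in values]
        # nudge the widest column until rounding errors cancel out
        while sum(pct) != 100:
            idx = pct.index(max(pct))
            pct[idx] += 1 if sum(pct) < 100 else -1
        return ['%d%%' % p for p in pct]

    print(normalize_widths([1, 2, 2]))   # ['20%', '40%', '40%']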
- - if incfile: - try: - self.styles = styles = rson_loads(source) - substyles = styles.get('styles') - if substyles is not None: - styles['styles'] = dict(substyles) - except: - pass - else: - self.changed = True - self.keep = False - return - - # Read the whole file and wrap it in a DummyFile - self.sourcef = DummyFile(source) - self.sourcef.name = name - - # Use a regular expression on the source, to take it apart - # and put it back together again. - - self.source = source = [x for x in self.splitter(source) if x] - self.result = result = [] - self.styles = {} - self.changed = False - - # More efficient to pop() a list than to keep taking tokens from [0] - source.reverse() - isblank = False - keywords = self.keywords - handle_single = keywords['single::'] - while source: - wasblank = isblank - isblank = False - chunk = source.pop() - result.append(chunk) - - # Only process single lines - if not chunk.endswith('\n'): - continue - result[-1] = chunk[:-1] - if chunk.index('\n') != len(chunk)-1: - continue - - # Parse the line to look for one of our keywords. - tokens = chunk.split() - isblank = not tokens - if len(tokens) >= 2 and tokens[0] == '..' and tokens[1].endswith('::'): - func = keywords.get(tokens[1]) - if func is None: - continue - chunk = chunk.split('::', 1)[1] - elif wasblank and len(tokens) == 1 and chunk[0].isalpha() and tokens[0].isalpha(): - func = handle_single - chunk = tokens[0] - else: - continue - - result.pop() - func(self, chunk.strip()) - - # Determine if we actually did anything or not. Just use our source file - # if not. Otherwise, write the results to disk (so the user can use them - # for debugging) and return them. - if self.changed: - result.append('') - result = DummyFile('\n'.join(result)) - result.name = name + '.build_temp' - self.keep = keep = len(result.strip()) - if keep: - f = open(result.name, 'wb') - f.write(result) - f.close() - self.result = result - else: - self.result = self.sourcef - - def handle_include(self, fname): - # Ugly, violates DRY, etc., but I'm not about to go - # figure out how to re-use docutils include file - # path processing! - - for prefix in ('', os.path.dirname(self.sourcef.name)): - try: - f = open(os.path.join(prefix, fname), 'rb') - except IOError: - continue - else: - break - else: - log.error("Could not find include file %s", fname) - self.changed = True - return - - # Recursively call this class to process include files. - # Extract all the information from the included file. - - inc = Preprocess(f, True, self.widthcount) - self.widthcount = inc.widthcount - if 'styles' in self.styles and 'styles' in inc.styles: - self.styles['styles'].update(inc.styles.pop('styles')) - self.styles.update(inc.styles) - if inc.changed: - self.changed = True - if not inc.keep: - return - fname = inc.result.name - self.result.extend(['', '', '.. include:: ' + fname, '']) - - def handle_single(self, word): - ''' Prepend the singleword class in front of the word. - ''' - self.changed = True - self.result.extend(['', '', '.. class:: singleword', '', word, '']) - - def handle_page(self, chunk): - ''' Insert a raw pagebreak - ''' - self.changed = True - self.result.extend(['', '', '.. raw:: pdf', '', - ' PageBreak ' + chunk, '']) - - def handle_space(self, chunk): - ''' Insert a raw space - ''' - self.changed = True - if len(chunk.replace(',', ' ').split()) == 1: - chunk = '0 ' + chunk - self.result.extend(['', '', '.. 
raw:: pdf', '', - ' Spacer ' + chunk, '']) - - def handle_widths(self, chunk): - ''' Insert a unique style in the stylesheet, and reference it - from a .. class:: comment. - ''' - self.changed = True - chunk = chunk.replace(',', ' ').replace('%', ' ').split() - if not chunk: - log.error('no widths specified in .. widths ::') - return - parent = chunk[0][0].isalpha() and chunk.pop(0) or 'table' - values = [float(x) for x in chunk] - total = sum(values) - values = [int(round(100 * x / total)) for x in values] - while 1: - total = sum(values) - if total > 100: - values[values.index(max(values))] -= 1 - elif total < 100: - values[values.index(max(values))] += 1 - else: - break - - values = ['%s%%' % x for x in values] - self.widthcount += 1 - stylename = 'embeddedtablewidth%d' % self.widthcount - self.styles.setdefault('styles', {})[stylename] = dict(parent=parent, colWidths=values) - self.result.extend(['', '', '.. class:: ' + stylename, '']) - - def handle_style(self, chunk): - ''' Parse through the source until we find lines that are no longer indented, - then pass our indented lines to the RSON parser. - ''' - self.changed = True - if chunk: - log.error(".. style:: does not recognize string %s" % repr(chunk)) - return - - mystyles = '\n'.join(self.read_indented()) - if not mystyles: - log.error("Empty .. style:: block found") - try: - styles = rson_loads(mystyles) - except ValueError, e: # Error parsing the JSON data - log.critical('Error parsing stylesheet "%s": %s'%\ - (mystyles, str(e))) - else: - self.styles.setdefault('styles', {}).update(styles) - - def read_indented(self): - ''' Read data from source while it is indented (or blank). - Stop on the first non-indented line, and leave the rest - on the source. - ''' - source = self.source - data = None - while source and not data: - data = source and source.pop().splitlines() or [] - data.reverse() - while data: - line = data.pop().rstrip() - if not line or line.lstrip() != line: - yield line - continue - data.append(line) - break - data.reverse() - data.append('') - source.append('\n'.join(data)) - source.append('\n') - - # Automatically generate our keywords from methods prefixed with 'handle_' - keywords = list(x[7:] for x in vars() if x.startswith('handle_')) - - # Generate the regular expression for parsing, and a split function using it. - blankline = r'^([ \t]*\n)' - singleword = r'^([A-Za-z]+[ \t]*\n)(?=[ \t]*\n)' - comment = r'^(\.\.[ \t]+(?:%s)\:\:.*\n)' % '|'.join(keywords) - expression = '(?:%s)' % '|'.join([blankline, singleword, comment]) - splitter = re.compile(expression, re.MULTILINE).split - - # Once we have used the keywords in our regular expression, - # fix them up for use by the parser. - keywords = dict([(x + '::', vars()['handle_' + x]) for x in keywords]) - -class MyStyles(str): - ''' This class conforms to the styles.py processing requirements - for a stylesheet that is not really a file. It must be callable(), - and str(x) must return the name of the stylesheet. - ''' - def __new__(cls, styles): - self = str.__new__(cls, 'Embedded Preprocess Styles') - self.data = styles - return self - def __call__(self): - return self.data - -def install(createpdf, options): - ''' This is where we intercept the document conversion. - Preprocess the restructured text, and insert our - new styles (if any). 
- ''' - data = Preprocess(options.infile) - options.infile = data.result - if data.styles: - options.style.append(MyStyles(data.styles)) diff --git a/dist-packages/rst2pdf/rst2pdf/extensions/sample.py b/dist-packages/rst2pdf/rst2pdf/extensions/sample.py deleted file mode 100644 index 3ec805d9e..000000000 --- a/dist-packages/rst2pdf/rst2pdf/extensions/sample.py +++ /dev/null @@ -1,20 +0,0 @@ -print ''' -This is a sample rst2pdf extension. - -Because it is named 'sample.py' you can get rst2pdf to import it by -putting '-e sample' on the rst2pdf command line. - -An extension is called after the command-line is parsed, and can -monkey-patch any necessary changes into rst2pdf. - -An extension can live either in the extensions subdirectory, or -anywhere on the python path. -''' - -def install(createpdf, options): - ''' This function is called with an object with the createpdf - module globals as attributes, and with the options from - the command line parser. This function does not have - to exist, but must have the correct call signature if - it does. - ''' diff --git a/dist-packages/rst2pdf/rst2pdf/extensions/vectorpdf_r2p.py b/dist-packages/rst2pdf/rst2pdf/extensions/vectorpdf_r2p.py deleted file mode 100644 index 90d56331b..000000000 --- a/dist-packages/rst2pdf/rst2pdf/extensions/vectorpdf_r2p.py +++ /dev/null @@ -1,155 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms - -import sys -import os -from weakref import WeakKeyDictionary -from copy import copy - -try: - from reportlab.rl_config import _FUZZ - from reportlab.platypus import Flowable - from reportlab.lib.enums import TA_LEFT, TA_CENTER, TA_RIGHT - - import pdfrw - from pdfrw.toreportlab import makerl - from pdfrw.buildxobj import CacheXObj - - from rst2pdf.log import log - import rst2pdf.image - from rst2pdf.opt_imports import LazyImports -except ImportError: - # This is just to make nosetest happy on the CI server - class Flowable: - pass - - # TODO: Looks the same as for other images, because I - # stole it from other image handlers. Common base class??? - - -class AnyCache(object): - ''' This is a memory leak waiting to happen. - It is used by the raster method. Not yet - sure how to define scope on these cached items. - ''' - -# This is monkey-patched into reportlab IFF we are using -# PDF files inside paragraphs. - -def drawImage(self, image, x, y, width=None, height=None, mask=None, - preserveAspectRatio=False, anchor='c'): - if not isinstance(image, VectorPdf): - return self._drawImageNotVectorPDF(image, x, y, width, height, mask, - preserveAspectRatio, anchor) - image.drawOn(self, x, y, width=width, height=height) - -class VectorPdf(Flowable): - - # The filecache allows us to only read a given PDF file once - # for every RstToPdf client object. This allows this module - # to usefully cache, while avoiding being the cause of a memory - # leak in a long-running process. 
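sample.py above spells out the extension contract: rst2pdf imports the module named with ``-e`` on the command line and, if the module defines it, calls ``install()`` with the createpdf module globals and the parsed options. A minimal sketch of such a module (the module name and the option attributes touched here are illustrative)::

    # mymodule.py, loaded with: rst2pdf -e mymodule input.txt
    def install(createpdf, options):
        '''Called once, after option parsing; free to monkey-patch rst2pdf.'''
        # for instance, peek at (or adjust) the parsed options before conversion
        if getattr(options, 'verbose', False):
            print('extension active; stylesheets: %r' % options.style)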
- - filecache = WeakKeyDictionary() - - @classmethod - def load_xobj(cls, srcinfo): - client, uri = srcinfo - loader = cls.filecache.get(client) - if loader is None: - loader = cls.filecache[client] = CacheXObj().load - return loader(uri) - - def __init__(self, filename, width=None, height=None, kind='direct', - mask=None, lazy=True, srcinfo=None): - Flowable.__init__(self) - self._kind = kind - self.xobj = xobj = self.load_xobj(srcinfo) - self.imageWidth, self.imageHeight = imageWidth, imageHeight = xobj.w, xobj.h - width = width or imageWidth - height = height or imageHeight - if kind in ['bound','proportional']: - factor = min(float(width)/imageWidth,float(height)/imageHeight) - width = factor * imageWidth - height = factor * imageHeight - self.drawWidth = width - self.drawHeight = height - - def wrap(self, aW, aH): - return self.drawWidth, self.drawHeight - - def drawOn(self, canv, x, y, _sW=0, width=0, height=0): - if _sW > 0 and hasattr(self, 'hAlign'): - a = self.hAlign - if a in ('CENTER', 'CENTRE', TA_CENTER): - x += 0.5*_sW - elif a in ('RIGHT', TA_RIGHT): - x += _sW - elif a not in ('LEFT', TA_LEFT): - raise ValueError("Bad hAlign value " + str(a)) - - xobj = self.xobj - xobj_name = makerl(canv._doc, xobj) - - xscale = (width or self.drawWidth) / xobj.w - yscale = (height or self.drawHeight) / xobj.h - x -= xobj.x * xscale - y -= xobj.y * yscale - - canv.saveState() - canv.translate(x, y) - canv.scale(xscale, yscale) - canv.doForm(xobj_name) - canv.restoreState() - - def _restrictSize(self,aW,aH): - if self.drawWidth>aW+_FUZZ or self.drawHeight>aH+_FUZZ: - self._oldDrawSize = self.drawWidth, self.drawHeight - factor = min(float(aW)/self.drawWidth,float(aH)/self.drawHeight) - self.drawWidth *= factor - self.drawHeight *= factor - return self.drawWidth, self.drawHeight - - def getSize(self): - return self.drawWidth, self.drawHeight - - @staticmethod - def SleazyPDFCheck(fname): - return fname.split('#',1)[0].rsplit('.',1)[1].lower() == 'pdf' - - OldImageReader = None - - @classmethod - def NewImageReader(cls, fname): - if cls.SleazyPDFCheck(fname): - return cls(None, srcinfo=(AnyCache, fname)) - else: - return cls.OldImageReader(fname) - - @classmethod - def raster(cls, fname, client): - ''' We don't REALLY generate a raster image. - Instead, we attempt to monkey-patch reportlab - to call us with the filename again later. - ''' - if cls.OldImageReader is None: - import reportlab.platypus.paraparser as p - cls.OldImageReader = p.ImageReader - p.ImageReader = cls.NewImageReader - from reportlab.pdfgen.canvas import Canvas as c - c._drawImageNotVectorPDF = c.drawImage - c.drawImage = drawImage - return fname - - def __deepcopy__(self, *whatever): - # VectorPDF class is not deep copyable. Stop the copy at this - # class. Related to issue 126, but cropped up later when - # we added fake raster stuff for reportlab tag. - return copy(self) - - -def install(createpdf, options): - ''' Monkey-patch this PDF handling into rst2pdf - ''' - LazyImports.pdfinfo = pdfrw - rst2pdf.image.VectorPdf = VectorPdf diff --git a/dist-packages/rst2pdf/rst2pdf/findfonts.py b/dist-packages/rst2pdf/rst2pdf/findfonts.py deleted file mode 100644 index 9b4cdf30b..000000000 --- a/dist-packages/rst2pdf/rst2pdf/findfonts.py +++ /dev/null @@ -1,385 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms - -""" -Scan a list of folders and find all .afm files, -then create rst2pdf-ready font-aliases. 
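In ReportLab terms, a "ready" font alias is four registered faces plus mappings from the (bold, italic) flags to face names, which is what autoEmbed below arranges automatically. A hand-rolled sketch of the same registration for a hypothetical TrueType family (font and file names are made up)::

    from reportlab.pdfbase import pdfmetrics
    from reportlab.pdfbase.ttfonts import TTFont
    from reportlab.lib.fonts import addMapping

    faces = [('MyFont',            'MyFont.ttf'),
             ('MyFont-Bold',       'MyFont-Bold.ttf'),
             ('MyFont-Italic',     'MyFont-Italic.ttf'),
             ('MyFont-BoldItalic', 'MyFont-BoldItalic.ttf')]
    for name, path in faces:
        pdfmetrics.registerFont(TTFont(name, path))

    addMapping('MyFont', 0, 0, 'MyFont')             # regular
    addMapping('MyFont', 1, 0, 'MyFont-Bold')        # bold
    addMapping('MyFont', 0, 1, 'MyFont-Italic')      # italic
    addMapping('MyFont', 1, 1, 'MyFont-BoldItalic')  # bold italic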
-""" - -import os -import sys - -from reportlab.pdfbase import pdfmetrics -from reportlab.pdfbase.ttfonts import TTFont, TTFontFile, TTFError, FF_FORCEBOLD, FF_ITALIC -from reportlab.lib.fonts import addMapping - -from log import log - -flist = [] -afmList = [] -pfbList = {} -ttfList = [] - -# Aliases defined by GhostScript, so if you use Palatino or whatever you -# may get **something**. They are family name aliases. -Alias = { - 'itc bookman': 'urw bookman l', - 'itc avant garde gothic': 'urw gothic l', - 'palatino': 'urw palladio l', - 'new century schoolbook': 'century schoolbook l', - 'itc zapf chancery': 'urw chancery l'} - -# Standard PDF fonts, so no need to embed them -Ignored = ['times', 'itc zapf dingbats', 'symbol', 'helvetica', 'courier'] - - -fonts = {} -families = {} -fontMappings = {} - - -def loadFonts(): - """ - Search the system and build lists of available fonts. - """ - - if not afmList and not pfbList and not ttfList: - # Find all ".afm" and ".pfb" files files - def findFontFiles(_, folder, names): - for f in os.listdir(folder): - ext=os.path.splitext(f)[-1] - if ext in ['.ttf','.ttc']: - ttfList.append(os.path.join(folder, f)) - if ext=='.afm': - afmList.append(os.path.join(folder, f)) - if ext=='.pfb': - pfbList[f[:-4]] = os.path.join(folder, f) - - for folder in flist: - os.path.walk(folder, findFontFiles, None) - - for ttf in ttfList: - '''Find out how to process these''' - try: - font = TTFontFile(ttf) - except TTFError: - continue - - #print ttf, font.name, font.fullName, font.styleName, font.familyName - family=font.familyName.lower() - fontName=font.name - baseName = os.path.basename(ttf)[:-4] - fullName=font.fullName - - fonts[fontName.lower()] = (ttf, ttf, family) - fonts[fullName.lower()] = (ttf, ttf, family) - fonts[fullName.lower().replace('italic','oblique')] = (ttf, ttf, family) - bold = (FF_FORCEBOLD == FF_FORCEBOLD & font.flags) - italic = (FF_ITALIC == FF_ITALIC & font.flags) - - # And we can try to build/fill the family mapping - if family not in families: - families[family] = [fontName, fontName, fontName, fontName] - if bold and italic: - families[family][3] = fontName - elif bold: - families[family][1] = fontName - elif italic: - families[family][2] = fontName - # FIXME: what happens if there are Demi and Medium - # weights? We get a random one. - else: - families[family][0] = fontName - - # Now we have full afm and pbf lists, process the - # afm list to figure out family name, weight and if - # it's italic or not, as well as where the - # matching pfb file is - - for afm in afmList: - family = None - fontName = None - italic = False - bold = False - for line in open(afm, 'r'): - line = line.strip() - if line.startswith('StartCharMetrics'): - break - elif line.startswith('FamilyName'): - family = ' '.join(line.split(' ')[1:]).lower() - elif line.startswith('FontName'): - fontName = line.split(' ')[1] - # TODO: find a way to alias the fullname to this font - # so you can use names like "Bitstream Charter Italic" - elif line.startswith('FullName'): - fullName = ' '.join(line.split(' ')[1:]) - elif line.startswith('Weight'): - w = line.split(' ')[1] - if w == 'Bold': - bold = True - elif line.startswith('ItalicAngle'): - if line.split(' ')[1] != '0.0': - italic = True - - baseName = os.path.basename(afm)[:-4] - if family in Ignored: - continue - if family in Alias: - continue - if baseName not in pfbList: - log.info("afm file without matching pfb file: %s"% baseName) - continue - - # So now we have a font we know we can embed. 
- fonts[fontName.lower()] = (afm, pfbList[baseName], family) - fonts[fullName.lower()] = (afm, pfbList[baseName], family) - fonts[fullName.lower().replace('italic','oblique')] = (afm, pfbList[baseName], family) - - # And we can try to build/fill the family mapping - if family not in families: - families[family] = [fontName, fontName, fontName, fontName] - if bold and italic: - families[family][3] = fontName - elif bold: - families[family][1] = fontName - elif italic: - families[family][2] = fontName - # FIXME: what happens if there are Demi and Medium - # weights? We get a random one. - else: - families[family][0] = fontName - -def findFont(fname): - loadFonts() - # So now we are sure we know the families and font - # names. Well, return some data! - fname=fname.lower() - if fname in fonts: - font = fonts[fname.lower()] - else: - if fname in Alias: - fname = Alias[fname] - if fname in families: - font = fonts[families[fname][0].lower()] - else: - return None - return font - -def findTTFont(fname): - - def get_family(query): - data = os.popen("fc-match \"%s\""%query, "r").read() - for line in data.splitlines(): - line = line.strip() - if not line: - continue - fname, family, _, variant = line.split('"')[:4] - family = family.replace('"', '') - if family: - return family - return None - - def get_fname(query): - data = os.popen("fc-match -v \"%s\""%query, "r").read() - for line in data.splitlines(): - line = line.strip() - if line.startswith("file: "): - return line.split('"')[1] - return None - - def get_variants(family): - variants = [ - get_fname(family + ":style=Roman"), - get_fname(family + ":style=Bold"), - get_fname(family + ":style=Oblique"), - get_fname(family + ":style=Bold Oblique")] - if variants[2] == variants[0]: - variants[2] = get_fname(family + ":style=Italic") - if variants[3] == variants[0]: - variants[3] = get_fname(family + ":style=Bold Italic") - if variants[0].endswith('.pfb') or variants[0].endswith('.gz'): - return None - return variants - - if os.name != 'nt': - family = get_family(fname) - if not family: - log.error("Unknown font: %s", fname) - return None - return get_variants(family) - else: - # lookup required font in registry lookup, alternative approach - # is to let loadFont() traverse windows font directory or use - # ctypes with EnumFontFamiliesEx - - def get_nt_fname(ftname): - import _winreg as _w - fontkey = _w.OpenKey(_w.HKEY_LOCAL_MACHINE, - "SOFTWARE\Microsoft\Windows NT\CurrentVersion\Fonts") - fontname = ftname + " (TrueType)" - try: - fname = _w.QueryValueEx(fontkey, fontname)[0] - if os.path.isabs(fname): - fontkey.close() - return fname - fontdir = os.environ.get("SystemRoot", u"C:\\Windows") - fontdir += u"\\Fonts" - fontkey.Close() - return fontdir + "\\" + fname - except WindowsError, err: - fontkey.Close() - return None - - family, pos = guessFont(fname) - fontfile = get_nt_fname(fname) - if not fontfile: - if pos == 0: - fontfile = get_nt_fname(family) - elif pos == 1: - fontfile = get_nt_fname(family + " Bold") - elif pos == 2: - fontfile = get_nt_fname(family + " Italic") or \ - get_nt_fname(family + " Oblique") - else: - fontfile = get_nt_fname(family + " Bold Italic") or \ - get_nt_fname(family + " Bold Oblique") - - if not fontfile: - log.error("Unknown font: %s", fname) - return None - - family, pos = guessFont(fname) - variants = [ - get_nt_fname(family) or fontfile, - get_nt_fname(family+" Bold") or fontfile, - get_nt_fname(family+" Italic") or \ - get_nt_fname(family+" Oblique") or fontfile, - get_nt_fname(family+" Bold Italic") or 
\ - get_nt_fname(family+" Bold Oblique") or fontfile, - ] - return variants - -def autoEmbed(fname): - """Given a font name, does a best-effort of embedding - said font and its variants. - - Returns a list of the font names it registered with ReportLab. - - """ - log.info('Trying to embed %s'%fname) - fontList = [] - variants=[] - f = findFont(fname) - if f : # We have this font located - if f[0].lower()[-4:]=='.afm': #Type 1 font - family = families[f[2]] - - # Register the whole family of faces - faces = [pdfmetrics.EmbeddedType1Face(*fonts[fn.lower()][:2]) for fn in family] - for face in faces: - pdfmetrics.registerTypeFace(face) - - for face, name in zip(faces, family): - fontList.append(name) - font = pdfmetrics.Font(face, name, "WinAnsiEncoding") - log.info('Registering font: %s from %s'%\ - (face,name)) - pdfmetrics.registerFont(font) - - # Map the variants - regular, italic, bold, bolditalic = family - addMapping(fname, 0, 0, regular) - addMapping(fname, 0, 1, italic) - addMapping(fname, 1, 0, bold) - addMapping(fname, 1, 1, bolditalic) - addMapping(regular, 0, 0, regular) - addMapping(regular, 0, 1, italic) - addMapping(regular, 1, 0, bold) - addMapping(regular, 1, 1, bolditalic) - log.info('Embedding as %s'%fontList) - return fontList - else: # A TTF font - variants = [fonts[f.lower()][0] for f in families[f[2]]] - if not variants: # Try fc-match - variants = findTTFont(fname) - # It is a TT Font and we found it using fc-match (or found *something*) - if variants: - for variant in variants: - vname = os.path.basename(variant)[:-4] - try: - if vname not in pdfmetrics._fonts: - _font=TTFont(vname, variant) - log.info('Registering font: %s from %s'%\ - (vname,variant)) - pdfmetrics.registerFont(_font) - except TTFError: - log.error('Error registering font: %s from %s'%(vname,variant)) - else: - fontList.append(vname) - regular, bold, italic, bolditalic = [ - os.path.basename(variant)[:-4] for variant in variants] - addMapping(regular, 0, 0, regular) - addMapping(regular, 0, 1, italic) - addMapping(regular, 1, 0, bold) - addMapping(regular, 1, 1, bolditalic) - log.info('Embedding via findTTFont as %s'%fontList) - return fontList - - -def guessFont(fname): - """Given a font name like "Tahoma-BoldOblique", "Bitstream Charter Italic" - or "Perpetua Bold Italic" guess what it means. 
- - Returns (family, x) where x is - 0: regular - 1: bold - 2: italic - 3: bolditalic - - """ - italic = 0 - bold = 0 - if '-' not in fname: - sfx = {"Bold":1, "Bold Italic":3, "Bold Oblique":3, "Italic":2, - "Oblique":2} - for key in sfx: - if fname.endswith(" "+key): - return fname.rpartition(key)[0], sfx[key] - return fname, 0 - - else: - family, mod = fname.rsplit('-', 1) - - mod = mod.lower() - if "oblique" in mod or "italic" in mod: - italic = 1 - if "bold" in mod: - bold = 1 - - if bold+italic == 0: #Not really a modifier - return fname, 0 - return family, bold + 2*italic - - -def main(): - global flist - if len(sys.argv) != 2: - print "Usage: findfont fontName" - sys.exit(1) - if os.name == 'nt': - flist = [".", os.environ.get("SystemRoot", "C:\\Windows")+"\\Fonts"] - else: - flist = [".", "/usr/share/fonts", "/usr/share/texmf-dist/fonts"] - fn, pos = guessFont(sys.argv[1]) - f = findFont(fn) - if not f: - f = findTTFont(fn) - if f: - print f - else: - print "Unknown font %s" % sys.argv[1] - - -if __name__ == "__main__": - main() diff --git a/dist-packages/rst2pdf/rst2pdf/flowables.py b/dist-packages/rst2pdf/rst2pdf/flowables.py deleted file mode 100644 index 9ba48aa05..000000000 --- a/dist-packages/rst2pdf/rst2pdf/flowables.py +++ /dev/null @@ -1,1044 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms -#$URL$ -#$Date$ -#$Revision$ - -__docformat__ = 'reStructuredText' - -from copy import copy - -from reportlab.platypus import * -from reportlab.platypus.doctemplate import * -from reportlab.lib.enums import * - -from opt_imports import Paragraph, NullDraw - -from reportlab.lib.units import * -from reportlab.lib.enums import TA_LEFT, TA_CENTER, TA_RIGHT -from reportlab.platypus.flowables import _listWrapOn, _FUZZ -from reportlab.platypus.tableofcontents import TableOfContents -from reportlab.lib.styles import ParagraphStyle - -import styles -from log import log - -import re -from xml.sax.saxutils import unescape, escape - -class XXPreformatted(XPreformatted): - """An extended XPreformattedFit""" - def __init__(self, *args, **kwargs): - XPreformatted.__init__(self, *args, **kwargs) - - def split (self, aW, aH): - - # Figure out a nice range of splits - # - # Assume we would prefer 5 lines (at least) on - # a splitted flowable before a break, and 4 on - # the last flowable after a break. 
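    # For example (assuming a style whose leading is 12pt): the split is
    # refused outright unless at least 5 * 12 = 60pt fit before the break,
    # and the break point is pulled back so at least 4 * 12 = 48pt of the
    # fragment carry over past it.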
- # So, the minimum wrap height for a fragment - # will be 5*leading - - rW, rH = self.wrap(aW, aH) - if rH > aH: - - minH1=getattr(self.style, 'allowOrphans', 5)*self.style.leading - minH2=getattr(self.style, 'allowWidows', 4)*self.style.leading - - # If there's no way to fid a decent fragment, - # refuse to split - if aH < minH1: - return [] - - # Now, don't split too close to the end either - pw, ph = self.wrap(aW, aH) - if ph - aH < minH2: - aH = ph - minH2 - - return XPreformatted.split(self, aW, aH) - -class MyIndenter(Indenter): - """An indenter that has a width, because otherwise you get crashes - if added inside tables""" - - width = 0 - height = 0 - - def draw(self): - pass - -class TocEntry(NullDraw): - """A flowable that adds a TOC entry but draws nothing""" - def __init__(self,level,label): - self.level=level - self.label=label - self.width=0 - self.height=0 - self.keepWithNext=True - - def draw(self): - # Add outline entry - self.canv.bookmarkHorizontal(self.label,0,0+self.height) - self.canv.addOutlineEntry(self.label, - self.label, - max(0,int(self.level)), False) - -class Heading(Paragraph): - """A paragraph that also adds an outline entry in - the PDF TOC.""" - - def __init__(self, text, style, bulletText=None, caseSensitive=1, level=0, - snum=None, parent_id=None, node=None, section_header_depth=2): - # Issue 114: need to convert "&" to "&" and such. - # Issue 140: need to make it plain text - self.stext=re.sub(r'<[^>]*?>', '', unescape(text)) - self.stext = self.stext.strip() - self.level = int(level) - self.snum = snum - self.parent_id=parent_id - self.node=node - self.section_header_depth = section_header_depth - Paragraph.__init__(self, text, style, bulletText) - - def draw(self): - - # Add outline entry - self.canv.bookmarkHorizontal(self.parent_id,0,0+self.height) - # self.section_header_depth is for Issue 391 - if self.canv.firstSect and self.level < self.section_header_depth: - self.canv.sectName = self.stext - self.canv.firstSect=False - if self.snum is not None: - self.canv.sectNum = self.snum - else: - self.canv.sectNum = "" - - self.canv.addOutlineEntry(self.stext.encode('utf-8','replace'), - self.parent_id.encode('utf-8','replace'), - int(self.level), False) - Paragraph.draw(self) - -class Separation(Flowable): - """A simple
-like flowable""" - - def wrap(self, w, h): - self.w = w - return w, 1*cm - - def draw(self): - self.canv.line(0, 0.5*cm, self.w, 0.5*cm) - - -class Reference(Flowable): - """A flowable to insert an anchor without taking space""" - - def __init__(self, refid): - self.refid = refid - self.keepWithNext=True - Flowable.__init__(self) - - def wrap(self, w, h): - """This takes no space""" - return 0, 0 - - def draw(self): - self.canv.bookmarkPage(self.refid) - - def repr(self): - return "Reference: %s" % self.refid - - def __str__(self): - return "Reference: %s" % self.refid - -class OddEven(Flowable): - """This flowable takes two lists of flowables as arguments, odd and even. - If will draw the "odd" list when drawn in odd pages and the "even" list on - even pages. - - - wrap() will always return a size large enough for both lists, and this flowable - **cannot** be split, so use with care. - """ - - def __init__(self, odd, even, style=None): - self.odd=DelayedTable([[odd]],['100%'], style) - self.even=DelayedTable([[even]],['100%'], style) - - def wrap(self, w, h): - """Return a box large enough for both odd and even""" - w1,h1=self.odd.wrap(w,h) - w2,h2=self.even.wrap(w,h) - return max(w1,w2), max (h1,h2) - - def drawOn(self, canvas, x, y, _sW=0): - if canvas._pagenum %2 == 0: - self.even.drawOn(canvas, x, y, _sW) - else: - self.odd.drawOn(canvas, x, y, _sW) - - def split(self): - """Makes no sense to split this...""" - return [] - -class DelayedTable(Table): - """A flowable that inserts a table for which it has the data. - - Needed so column widths can be determined after we know on what frame - the table will be inserted, thus making the overal table width correct. - - """ - - def __init__(self, data, colWidths, style=None, repeatrows=False, splitByRow=True): - self.data = data - self._colWidths = colWidths - if style is None: - style = TableStyle([ - ('LEFTPADDING', (0,0), (-1,-1), 0), - ('RIGHTPADDING', (0,0), (-1,-1), 0), - ('TOPPADDING', (0,0), (-1,-1), 0), - ('BOTTOMPADDING', (0,0), (-1,-1), 0), - ]) - self.style = style - self.t = None - self.repeatrows = repeatrows - self.hAlign = TA_CENTER - self.splitByRow=splitByRow - - ## Try to look more like a Table - #self._ncols = 2 - #self._nosplitCmds= [] - #self._nrows= 1 - #self._rowHeights= [None] - #self._spanCmds= [] - #self.ident= None - #self.repeatCols= 0 - #self.repeatRows= 0 - #self.splitByRow= 1 - #self.vAlign= 'MIDDLE' - - def wrap(self, w, h): - # Create the table, with the widths from colWidths reinterpreted - # if needed as percentages of frame/cell/whatever width w is. 
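    # For example (hypothetical numbers): with _colWidths of ['25%', '75%']
    # and a frame that calls wrap() with w = 440pt, the resolved widths are
    # 110pt and 330pt, and only then is the real Table built.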
- - #_tw = w/sum(self.colWidths) - def adjust(*args, **kwargs): - kwargs['total']=w - return styles.adjustUnits(*args, **kwargs) - #adjust=functools.partial(styles.adjustUnits, total=w) - self.colWidths=map(adjust, self._colWidths) - #colWidths = [_w * _tw for _w in self.colWidths] - self.t = Table(self.data, colWidths=self.colWidths, - style=self.style, repeatRows=self.repeatrows, - splitByRow=True) - #splitByRow=self.splitByRow) - self.t.hAlign = self.hAlign - return self.t.wrap(w, h) - - def split(self, w, h): - if self.splitByRow: - if not self.t: - self.wrap(w,h) - return self.t.split(w, h) - else: - return [] - - def drawOn(self, canvas, x, y, _sW=0): - self.t.drawOn(canvas, x, y, _sW) - - def identity(self, maxLen=None): - return "<%s at %s%s%s> containing: %s" % (self.__class__.__name__, - hex(id(self)), self._frameName(), - getattr(self, 'name', '') - and (' name="%s"' % getattr(self, 'name', '')) or '', - unicode(self.data[0])[:180]) - -def tablepadding(padding): - if not isinstance(padding,(list,tuple)): - padding=[padding,]*4 - return padding, ('TOPPADDING',[0,0],[-1,-1],padding[0]),\ - ('RIGHTPADDING',[-1,0],[-1,-1],padding[1]),\ - ('BOTTOMPADDING',[0,0],[-1,-1],padding[2]),\ - ('LEFTPADDING',[1,0],[1,-1],padding[3]) - -class SplitTable(DelayedTable): - def __init__(self, data, colWidths, style, padding=3): - if len(data) <>1 or len(data[0]) <>2: - log.error('SplitTable can only be 1 row and two columns!') - sys.exit(1) - DelayedTable.__init__(self,data,colWidths,style) - self.padding, p1, p2, p3, p4=tablepadding(padding) - self.style._cmds.insert(0,p1) - self.style._cmds.insert(0,p2) - self.style._cmds.insert(0,p3) - self.style._cmds.insert(0,p4) - - def identity(self, maxLen=None): - return "<%s at %s%s%s> containing: %s" % (self.__class__.__name__, - hex(id(self)), self._frameName(), - getattr(self, 'name', '') - and (' name="%s"' % getattr(self, 'name', '')) or '', - unicode(self.data[0][1])[:180]) - - def split(self,w,h): - _w,_h=self.wrap(w, h) - - if _h > h: # Can't split! - # The right column data mandates the split - # Find which flowable exceeds the available height - - dw=self.colWidths[0]+self.padding[1]+self.padding[3] - dh=self.padding[0]+self.padding[2] - - bullet=self.data[0][0] - text=self.data[0][1] - for l in range(0,len(text)): - _,fh = _listWrapOn(text[:l+1],w-dw,None) - if fh+dh > h: - # The lth flowable is the guilty one - # split it - - _,lh=_listWrapOn(text[:l],w-dw,None) - # Workaround for Issue 180 - text[l].wrap(w-dw,h-lh-dh) - l2=text[l].split(w-dw,h-lh-dh) - if l2==[]: # Not splittable, push some to next page - if l==0: # Can't fit anything, push all to next page - return l2 - - # We reduce the number of items we keep on the - # page for two reasons: - # 1) If an item is associated with the following - # item (getKeepWithNext() == True), we have - # to back up to a previous one. - # 2) If we miscalculated the size required on - # the first page (I dunno why, probably not - # counting borders properly, but we do - # miscalculate occasionally). Seems to - # have to do with nested tables, so it might - # be the extra space on the border on the - # inner table. 
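The comment above explains why the chosen split point may still have to back up (keepWithNext, occasional height miscalculation with nested tables). The forward search just before it boils down to: wrap ever-longer prefixes of the cell's flowables until one no longer fits. A minimal sketch under that assumption; wrap_list stands in for ReportLab's internal _listWrapOn and the function name is illustrative.

def find_overflowing_index(flowables, avail_width, avail_height, wrap_list):
    """Return the index of the first flowable whose cumulative wrapped height
    exceeds avail_height, or None if everything fits."""
    for i in range(len(flowables)):
        _, height = wrap_list(flowables[:i + 1], avail_width)
        if height > avail_height:
            return i
    return None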
- - while l > 0: - if not text[l-1].getKeepWithNext(): - first_t = Table([ - [bullet, - text[:l]] - ], - colWidths=self.colWidths, - style=self.style) - _w,_h = first_t.wrap(w, h) - if _h <= h: - break - l -= 1 - - if l>0: - # Workaround for Issue 180 with wordaxe: - #if wordaxe is not None: - #l3=[Table([ - #[bullet, - #text[:l]] - #], - #colWidths=self.colWidths, - #style=self.style), - #Table([['',text[l:]]], - #colWidths=self.colWidths, - #style=self.style)] - #else: - l3=[first_t, - SplitTable([['',text[l:]]], - colWidths=self.colWidths, - style=self.style, - padding=self.padding)] - else: # Everything flows - l3=[] - else: - l3=[Table([[bullet,text[:l]+[l2[0]]]], - colWidths=self.colWidths, - rowHeights=[h], - style=self.style)] - if l2[1:]+text[l+1:]: - ## Workaround for Issue 180 with wordaxe: - #if wordaxe is not None: - #l3.append( - #Table([['',l2[1:]+text[l+1:]]], - #colWidths=self.colWidths, - #style=self.style)) - #else: - l3.append( - SplitTable([['',l2[1:]+text[l+1:]]], - colWidths=self.colWidths, - style=self.style, - padding=self.padding)) - return l3 - log.debug("Can't split splittable") - return self.t.split(w, h) - else: - return DelayedTable.split(self,w,h) - - -class MySpacer(Spacer): - def wrap (self, aW, aH): - w, h = Spacer.wrap(self, aW, aH) - self.height = min(aH, h) - return w, self.height - - - -class MyPageBreak(FrameActionFlowable): - - def __init__(self, templateName=None, breakTo='any'): - '''templateName switches the page template starting in the - next page. - - breakTo can be 'any' 'even' or 'odd'. - - 'even' will break one page if the current page is odd - or two pages if it's even. That way the next flowable - will be in an even page. - - 'odd' is the opposite of 'even' - - 'any' is the default, and means it will always break - only one page. - - ''' - - self.templateName = templateName - self.breakTo=breakTo - self.forced=False - self.extraContent=[] - - def frameAction(self, frame): - frame._generated_content = [] - if self.breakTo=='any': # Break only once. None if at top of page - if not frame._atTop: - frame._generated_content.append(SetNextTemplate(self.templateName)) - frame._generated_content.append(PageBreak()) - elif self.breakTo=='odd': #Break once if on even page, twice - #on odd page, none if on top of odd page - if frame._pagenum % 2: #odd pageNum - if not frame._atTop: - # Blank pages get no heading or footer - frame._generated_content.append(SetNextTemplate(self.templateName)) - frame._generated_content.append(SetNextTemplate('emptyPage')) - frame._generated_content.append(PageBreak()) - frame._generated_content.append(ResetNextTemplate()) - frame._generated_content.append(PageBreak()) - else: #even - frame._generated_content.append(SetNextTemplate(self.templateName)) - frame._generated_content.append(PageBreak()) - elif self.breakTo=='even': #Break once if on odd page, twice - #on even page, none if on top of even page - if frame._pagenum % 2: #odd pageNum - frame._generated_content.append(SetNextTemplate(self.templateName)) - frame._generated_content.append(PageBreak()) - else: #even - if not frame._atTop: - # Blank pages get no heading or footer - frame._generated_content.append(SetNextTemplate(self.templateName)) - frame._generated_content.append(SetNextTemplate('emptyPage')) - frame._generated_content.append(PageBreak()) - frame._generated_content.append(ResetNextTemplate()) - frame._generated_content.append(PageBreak()) - -class SetNextTemplate(Flowable): - """Set canv.templateName when drawing. 
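A minimal sketch of the breakTo parity logic implemented in MyPageBreak.frameAction above: given the current 1-based page number, whether the frame is still at the top of the page, and the requested parity, it decides how many PageBreaks get emitted. Names are illustrative, not part of the rst2pdf API.

def breaks_needed(page_num, at_top, break_to='any'):
    """How many page breaks MyPageBreak emits (0, 1 or 2)."""
    if break_to == 'any':
        return 0 if at_top else 1
    on_wanted_page = (page_num % 2 == 1) == (break_to == 'odd')
    if on_wanted_page:
        # Already on a page of the right parity: stay put if at the top,
        # otherwise break twice (the intermediate page stays blank).
        return 0 if at_top else 2
    return 1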
- - rst2pdf uses that to switch page templates. - - """ - - def __init__(self, templateName=None): - self.templateName = templateName - Flowable.__init__(self) - - def draw(self): - if self.templateName: - try: - self.canv.oldTemplateName = self.canv.templateName - except: - self.canv.oldTemplateName = 'oneColumn' - self.canv.templateName = self.templateName - -class ResetNextTemplate(Flowable): - """Go back to the previous template. - - rst2pdf uses that to switch page templates back when - temporarily it needed to switch to another template. - - For example, after a OddPageBreak, there can be a totally - blank page. Those have to use coverPage as a template, - because they must not have headers or footers. - - And then we need to switch back to whatever was used. - - """ - - def __init__(self): - Flowable.__init__(self) - - def draw(self): - self.canv.templateName, self.canv.oldTemplateName = \ - self.canv.oldTemplateName, self.canv.templateName - - def wrap(self, aW, aH): - return 0,0 - -class Transition(Flowable): - """Wrap canvas.setPageTransition. - - Sets the transition effect from the current page to the next. - - """ - - PageTransitionEffects = dict( - Split=['direction', 'motion'], - Blinds=['dimension'], - Box=['motion'], - Wipe=['direction'], - Dissolve=[], - Glitter=['direction']) - - def __init__(self, *args): - if len(args) < 1: - args = [None, 1] # No transition - # See if we got a valid transition effect name - if args[0] not in self.PageTransitionEffects: - log.error('Unknown transition effect name: %s' % args[0]) - args[0] = None - elif len(args) == 1: - args.append(1) - - # FIXME: validate more - self.args = args - - def wrap(self, aw, ah): - return 0, 0 - - def draw(self): - kwargs = dict( - effectname=None, - duration=1, - direction=0, - dimension='H', - motion='I') - ceff = ['effectname', 'duration'] +\ - self.PageTransitionEffects[self.args[0]] - for argname, argvalue in zip(ceff, self.args): - kwargs[argname] = argvalue - kwargs['duration'] = int(kwargs['duration']) - kwargs['direction'] = int(kwargs['direction']) - self.canv.setPageTransition(**kwargs) - - -class SmartFrame(Frame): - """A (Hopefully) smarter frame object. - - This frame object knows how to handle a two-pass - layout procedure (someday). 
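A minimal sketch of how Transition.draw above maps its positional arguments onto canvas.setPageTransition keyword arguments: the first two positions are always effect name and duration, and the remaining ones depend on the effect. The table mirrors PageTransitionEffects; the helper name is illustrative.

PAGE_TRANSITION_EFFECTS = {
    'Split': ['direction', 'motion'], 'Blinds': ['dimension'],
    'Box': ['motion'], 'Wipe': ['direction'],
    'Dissolve': [], 'Glitter': ['direction'],
}

def transition_kwargs(*args):
    """Merge positional transition arguments over the default keyword values."""
    kwargs = dict(effectname=None, duration=1, direction=0,
                  dimension='H', motion='I')
    keys = ['effectname', 'duration'] + PAGE_TRANSITION_EFFECTS[args[0]]
    kwargs.update(zip(keys, args))
    return kwargs

# transition_kwargs('Wipe', 2, 90)
# -> {'effectname': 'Wipe', 'duration': 2, 'direction': 90, 'dimension': 'H', 'motion': 'I'}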
- - """ - - def __init__(self, container, x1, y1, width, height, - leftPadding=6, bottomPadding=6, rightPadding=6, topPadding=6, - id=None, showBoundary=0, overlapAttachedSpace=None, _debug=None): - self.container = container - self.onSidebar = False - self.__s = '[%s, %s, %s, %s, %s, %s, %s, %s,]'\ - %(x1,y1,width,height, - leftPadding, bottomPadding, - rightPadding, topPadding) - Frame.__init__(self, x1, y1, width, height, - leftPadding, bottomPadding, rightPadding, topPadding, - id, showBoundary, overlapAttachedSpace, _debug) - - def add (self, flowable, canv, trySplit=0): - flowable._atTop=self._atTop - return Frame.add(self, flowable, canv, trySplit) - - def __repr__(self): - return self.__s - - def __deepcopy__(self, *whatever): - return copy(self) - -class FrameCutter(FrameActionFlowable): - - def __init__(self, dx, width, flowable, padding, lpad, floatLeft=True): - self.width = width - self.dx = dx - self.f = flowable - self.padding = padding - self.lpad = lpad - self.floatLeft = floatLeft - - def frameAction(self, frame): - idx = frame.container.frames.index(frame) - if self.floatLeft: - # Don't bother inserting a silly thin frame - if self.width-self.padding > 30: - f1 = SmartFrame(frame.container, - frame._x1 + self.dx - 2*self.padding, - frame._y2 - self.f.height - 3*self.padding, - self.width + 2*self.padding, - self.f.height + 3*self.padding, - bottomPadding=0, topPadding=0, leftPadding=self.lpad) - f1._atTop = frame._atTop - # This is a frame next to a sidebar. - f1.onSidebar = True - frame.container.frames.insert(idx + 1, f1) - # Don't add silly thin frame - if frame._height-self.f.height - 2*self.padding > 30: - frame.container.frames.insert(idx + 2, - SmartFrame(frame.container, - frame._x1, - frame._y1p, - self.width + self.dx, - frame._height - self.f.height - 3*self.padding, - topPadding=0)) - else: - # Don't bother inserting a silly thin frame - if self.width-self.padding > 30: - f1 = SmartFrame(frame.container, - frame._x1 - self.width, - frame._y2 - self.f.height - 2*self.padding, - self.width, - self.f.height + 2*self.padding, - bottomPadding=0, topPadding=0, rightPadding=self.lpad) - f1._atTop = frame._atTop - # This is a frame next to a sidebar. - f1.onSidebar = True - frame.container.frames.insert(idx + 1, f1) - if frame._height - self.f.height - 2*self.padding > 30: - frame.container.frames.insert(idx + 2, - SmartFrame(frame.container, - frame._x1 - self.width, - frame._y1p, - self.width + self.dx, - frame._height - self.f.height - 2*self.padding, - topPadding=0)) - - -class Sidebar(FrameActionFlowable): - - def __init__(self, flowables, style): - self.style = style - self.width = self.style.width - self.flowables = flowables - - def frameAction(self, frame): - if self.style.float not in ('left', 'right'): - return - if frame.onSidebar: # We are still on the frame next to a sidebar! 
- frame._generated_content = [FrameBreak(), self] - else: - w = frame.container.styles.adjustUnits(self.width, frame.width) - idx = frame.container.frames.index(frame) - padding = self.style.borderPadding - width = self.style.width - self.style.padding = frame.container.styles.adjustUnits( - str(padding), frame.width) - self.style.width = frame.container.styles.adjustUnits( - str(width), frame.width) - self.kif = BoxedContainer(self.flowables, self.style) - if self.style.float == 'left': - self.style.lpad = frame.leftPadding - f1 = SmartFrame(frame.container, - frame._x1, - frame._y1p, - w - 2*self.style.padding, - frame._y - frame._y1p, - leftPadding=self.style.lpad, rightPadding=0, - bottomPadding=0, topPadding=0) - f1._atTop = frame._atTop - frame.container.frames.insert(idx+1, f1) - frame._generated_content = [ - FrameBreak(), - self.kif, - FrameCutter(w, - frame.width - w, - self.kif, - padding, - self.style.lpad, - True), - FrameBreak()] - elif self.style.float == 'right': - self.style.lpad = frame.rightPadding - frame.container.frames.insert(idx + 1, - SmartFrame(frame.container, - frame._x1 + frame.width - self.style.width, - frame._y1p, - w, frame._y-frame._y1p, - rightPadding=self.style.lpad, leftPadding=0, - bottomPadding=0, topPadding=0)) - frame._generated_content = [ - FrameBreak(), - self.kif, - FrameCutter(w, - frame.width - w, - self.kif, - padding, - self.style.lpad, - False), - FrameBreak()] - - -class BoundByWidth(Flowable): - """Limit a list of flowables by width. - - This still lets the flowables break over pages and frames. - - """ - - def __init__(self, maxWidth, content=[], style=None, mode=None, scale = None): - self.maxWidth = maxWidth - self.content = content - self.style = style - self.mode = mode - self.pad = None - self.scale = scale - Flowable.__init__(self) - - def border_padding(self, useWidth, additional): - sdict = self.style - sdict = sdict.__dict__ or {} - bp = sdict.get("borderPadding", 0) - if useWidth: - additional += sdict.get("borderWidth", 0) - if not isinstance(bp, list): - bp = [bp] * 4 - return [x + additional for x in bp] - - def identity(self, maxLen=None): - return "<%s at %s%s%s> containing: %s" % (self.__class__.__name__, - hex(id(self)), self._frameName(), - getattr(self, 'name', '') - and (' name="%s"' % getattr(self, 'name', '')) or '', - unicode([c.identity() for c in self.content])[:80]) - - def wrap(self, availWidth, availHeight): - """If we need more width than we have, complain, keep a scale""" - self.pad = self.border_padding(True, 0.1) - maxWidth = float(min( - styles.adjustUnits(self.maxWidth, availWidth) or availWidth, - availWidth)) - self.maxWidth = maxWidth - maxWidth -= (self.pad[1]+self.pad[3]) - self.width, self.height = _listWrapOn(self.content, maxWidth, None) - if self.width > maxWidth: - if self.mode <> 'shrink': - self.scale = 1.0 - log.warning("BoundByWidth too wide to fit in frame (%s > %s): %s", - self.width,maxWidth,self.identity()) - if self.mode == 'shrink' and not self.scale: - self.scale = (maxWidth + self.pad[1]+self.pad[3])/\ - (self.width + self.pad[1]+self.pad[3]) - else: - self.scale = 1.0 - self.height *= self.scale - self.width *= self.scale - return self.width, self.height + (self.pad[0]+self.pad[2])*self.scale - - def split(self, availWidth, availHeight): - if not self.pad: - self.wrap(availWidth, availHeight) - content = self.content - if len(self.content) == 1: - # We need to split the only element we have - content = content[0].split( - availWidth - (self.pad[1]+self.pad[3]), - availHeight - 
(self.pad[0]+self.pad[2])) - result = [BoundByWidth(self.maxWidth, [f], - self.style, self.mode, self.scale) for f in content] - return result - - def draw(self): - """we simulate being added to a frame""" - canv = self.canv - canv.saveState() - x = canv._x - y = canv._y - _sW = 0 - scale = self.scale - content = None - #, canv, x, y, _sW=0, scale=1.0, content=None, aW=None): - pS = 0 - aW = self.width - aW = scale*(aW + _sW) - if content is None: - content = self.content - y += (self.height + self.pad[2])/scale - x += self.pad[3] - for c in content: - w, h = c.wrapOn(canv, aW, 0xfffffff) - if (w < _FUZZ or h < _FUZZ) and not getattr(c, '_ZEROSIZE', None): - continue - if c is not content[0]: - h += max(c.getSpaceBefore() - pS, 0) - y -= h - canv.saveState() - if self.mode == 'shrink': - canv.scale(scale, scale) - elif self.mode == 'truncate': - p = canv.beginPath() - p.rect(x-self.pad[3], - y-self.pad[2], - self.maxWidth, - self.height + self.pad[0]+self.pad[2]) - canv.clipPath(p, stroke=0) - c.drawOn(canv, x, y, _sW=aW - w) - canv.restoreState() - if c is not content[-1]: - pS = c.getSpaceAfter() - y -= pS - canv.restoreState() - - -class BoxedContainer(BoundByWidth): - - def __init__(self, content, style, mode='shrink'): - try: - w=style.width - except AttributeError: - w='100%' - BoundByWidth.__init__(self, w, content, mode=mode, style=None) - self.style = style - self.mode = mode - - def identity(self, maxLen=None): - return unicode([u"BoxedContainer containing: ", - [c.identity() for c in self.content]])[:80] - - def draw(self): - canv = self.canv - canv.saveState() - x = canv._x - y = canv._y - _sW = 0 - lw = 0 - if self.style and self.style.borderWidth > 0: - lw = self.style.borderWidth - canv.setLineWidth(self.style.borderWidth) - if self.style.borderColor: # This could be None :-( - canv.setStrokeColor(self.style.borderColor) - stroke=1 - else: - stroke=0 - else: - stroke=0 - if self.style and self.style.backColor: - canv.setFillColor(self.style.backColor) - fill=1 - else: - fill=0 - - - padding = self.border_padding(False, lw) - xpadding = padding[1] + padding[3] - ypadding = padding[0] + padding[2] - p = canv.beginPath() - p.rect(x, y, self.width + xpadding, self.height + ypadding) - canv.drawPath(p, stroke=stroke, fill=fill) - canv.restoreState() - BoundByWidth.draw(self) - - def split(self, availWidth, availHeight): - self.wrap(availWidth, availHeight) - padding = (self.pad[1]+self.pad[3])*self.scale - if self.height + padding <= availHeight: - return [self] - else: - # Try to figure out how many elements - # we can put in the available space - candidate = None - remainder = None - for p in range(1, len(self.content)): - b = BoxedContainer(self.content[:p], self.style, self.mode) - w, h = b.wrap(availWidth, availHeight) - if h < availHeight: - candidate = b - if self.content[p:]: - remainder = BoxedContainer(self.content[p:], - self.style, - self.mode) - else: - break - if not candidate or not remainder: # Nothing fits, break page - return [] - if not remainder: # Everything fits? 
- return [self] - return [candidate, remainder] - - -if reportlab.Version == '2.1': - import reportlab.platypus.paragraph as pla_para - - ################Ugly stuff below - def _do_post_text(i, t_off, tx): - """From reportlab's paragraph.py, patched to avoid underlined links""" - xs = tx.XtraState - leading = xs.style.leading - ff = 0.125*xs.f.fontSize - y0 = xs.cur_y - i*leading - y = y0 - ff - ulc = None - for x1, x2, c in xs.underlines: - if c != ulc: - tx._canvas.setStrokeColor(c) - ulc = c - tx._canvas.line(t_off + x1, y, t_off + x2, y) - xs.underlines = [] - xs.underline = 0 - xs.underlineColor = None - - ys = y0 + 2*ff - ulc = None - for x1, x2, c in xs.strikes: - if c != ulc: - tx._canvas.setStrokeColor(c) - ulc = c - tx._canvas.line(t_off + x1, ys, t_off + x2, ys) - xs.strikes = [] - xs.strike = 0 - xs.strikeColor = None - - yl = y + leading - for x1, x2, link in xs.links: - # This is the bad line - # tx._canvas.line(t_off+x1, y, t_off+x2, y) - _doLink(tx, link, (t_off + x1, y, t_off + x2, yl)) - xs.links = [] - xs.link = None - - # Look behind you! A three-headed monkey! - pla_para._do_post_text.func_code = _do_post_text.func_code - ############### End of the ugly - -class MyTableOfContents(TableOfContents): - """ - Subclass of reportlab.platypus.tableofcontents.TableOfContents - which supports hyperlinks to corresponding sections. - """ - - def __init__(self, *args, **kwargs): - - # The parent argument is to define the locality of - # the TOC. If it's none, it's a global TOC and - # any heading it's notified about is accepted. - - # If it's a node, then the heading needs to be "inside" - # that node. This can be figured out because - # the heading flowable keeps a reference to the title - # node it was creatd from. - # - # Yes, this is gross. - - self.parent=kwargs.pop('parent') - TableOfContents.__init__(self, *args, **kwargs) - # reference ids for which this TOC should be notified - self.refids = [] - # revese lookup table from (level, text) to refid - self.refid_lut = {} - self.linkColor = "#0000ff" - - def notify(self, kind, stuff): - # stuff includes (level, text, pagenum, label) - level, text, pageNum, label, node = stuff - rlabel='-'.join(label.split('-')[:-1]) - - def islocal(_node): - '''See if this node is "local enough" for this TOC. - This is for Issue 196''' - if self.parent is None: - return True - while _node.parent: - if _node.parent == self.parent: - return True - _node=_node.parent - return False - - if rlabel in self.refids and islocal(node): - self.addEntry(level, text, pageNum) - self.refid_lut[(level, text, pageNum)] = label - - def wrap(self, availWidth, availHeight): - """Adds hyperlink to toc entry.""" - - widths = (availWidth - self.rightColumnWidth, - self.rightColumnWidth) - - # makes an internal table which does all the work. - # we draw the LAST RUN's entries! 
If there are - # none, we make some dummy data to keep the table - # from complaining - if len(self._lastEntries) == 0: - if reportlab.Version <= '2.3': - _tempEntries = [(0, 'Placeholder for table of contents', 0)] - else: - _tempEntries = [(0, 'Placeholder for table of contents', - 0, None)] - else: - _tempEntries = self._lastEntries - - if _tempEntries: - base_level = _tempEntries[0][0] - else: - base_level = 0 - tableData = [] - for entry in _tempEntries: - level, text, pageNum = entry[:3] - left_col_level = level - base_level - if reportlab.Version > '2.3': # For ReportLab post-2.3 - leftColStyle=self.getLevelStyle(left_col_level) - else: # For ReportLab <= 2.3 - leftColStyle = self.levelStyles[left_col_level] - label = self.refid_lut.get((level, text, pageNum), None) - if label: - pre = u'' % (label, self.linkColor) - post = u'' - if not isinstance(text, unicode): - text = unicode(text, 'utf-8') - text = pre + text + post - else: - pre = '' - post = '' - #right col style is right aligned - rightColStyle = ParagraphStyle(name='leftColLevel%d' % left_col_level, - parent=leftColStyle, leftIndent=0, alignment=TA_RIGHT) - leftPara = Paragraph(text, leftColStyle) - rightPara = Paragraph(pre+str(pageNum)+post, rightColStyle) - tableData.append([leftPara, rightPara]) - - self._table = Table(tableData, colWidths=widths, style=self.tableStyle) - - self.width, self.height = self._table.wrapOn(self.canv, availWidth, availHeight) - return self.width, self.height - - def split(self, aW, aH): - # Make sure _table exists before splitting. - # This was only triggered in rare cases using sphinx. - if not self._table: - self.wrap(aW,aH) - return TableOfContents.split(self, aW, aH) - - def isSatisfied(self): - if self._entries == self._lastEntries: - log.debug('Table Of Contents is stable') - return True - else: - if len(self._entries) != len(self._lastEntries): - log.info('Number of items in TOC changed '\ - 'from %d to %d, not satisfied'%\ - (len(self._lastEntries),len(self._entries))) - return False - - log.info('TOC entries that moved in this pass:') - for i in xrange(len(self._entries)): - if self._entries[i] != self._lastEntries[i]: - log.info(str(self._entries[i])) - log.info(str(self._lastEntries[i])) - - return False - diff --git a/dist-packages/rst2pdf/rst2pdf/genelements.py b/dist-packages/rst2pdf/rst2pdf/genelements.py deleted file mode 100644 index 668ebef93..000000000 --- a/dist-packages/rst2pdf/rst2pdf/genelements.py +++ /dev/null @@ -1,966 +0,0 @@ -# -*- coding: utf-8 -*- - -#$URL$ -#$Date$ -#$Revision$ - -# See LICENSE.txt for licensing terms - -# Some fragments of code are copied from Reportlab under this license: -# -##################################################################################### -# -# Copyright (c) 2000-2008, ReportLab Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without modification, -# are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# * Neither the name of the company nor the names of its contributors may be -# used to endorse or promote products derived from this software without -# specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -# IN NO EVENT SHALL THE OFFICERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; -# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER -# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. -# -##################################################################################### - - -import os -import tempfile -import re -from copy import copy - -from basenodehandler import NodeHandler - -import docutils.nodes -from oddeven_directive import OddEvenNode -import reportlab - -from aafigure_directive import Aanode - -from log import log, nodeid -from utils import log, parseRaw, parseHTML -from reportlab.platypus import Paragraph, TableStyle -from reportlab.lib.units import cm -from reportlab.lib.enums import TA_LEFT, TA_CENTER, TA_RIGHT -from flowables import Table, DelayedTable, SplitTable, Heading, \ - MyIndenter, MyTableOfContents, MySpacer, \ - Separation, BoxedContainer, BoundByWidth, \ - MyPageBreak, Reference, tablepadding, OddEven, \ - XPreformatted - -from opt_imports import wordaxe, Paragraph, ParagraphStyle - - -class TocBuilderVisitor(docutils.nodes.SparseNodeVisitor): - - def __init__(self, document): - docutils.nodes.SparseNodeVisitor.__init__(self, document) - self.toc = None - # For some reason, when called via sphinx, - # .. contents:: ends up trying to call - # visitor.document.reporter.debug - # so we need a valid document here. - self.document=docutils.utils.new_document('') - - def visit_reference(self, node): - refid = node.attributes.get('refid') - if refid: - self.toc.refids.append(refid) - - -class HandleDocument(NodeHandler, docutils.nodes.document): - pass - -class HandleTable(NodeHandler, docutils.nodes.table): - def gather_elements(self, client, node, style): - if node['classes']: - style = client.styles.combinedStyle(['table']+node['classes']) - else: - style = client.styles['table'] - return [MySpacer(0, client.styles['table'].spaceBefore)] + \ - client.gather_elements(node, style=style) +\ - [MySpacer(0, client.styles['table'].spaceAfter)] - -class HandleTGroup(NodeHandler, docutils.nodes.tgroup): - def gather_elements(self, client, node, style): - - # Take the style from the parent "table" node - # because sometimes it's not passed down. - - if node.parent['classes']: - style = client.styles.combinedStyle(['table']+node.parent['classes']) - else: - style = client.styles['table'] - rows = [] - colWidths = [] - hasHead = False - headRows = 0 - for n in node.children: - if isinstance(n, docutils.nodes.thead): - hasHead = True - for row in n.children: - r = [] - for cell in row.children: - r.append(cell) - rows.append(r) - headRows = len(rows) - elif isinstance(n, docutils.nodes.tbody): - for row in n.children: - r = [] - for cell in row.children: - r.append(cell) - rows.append(r) - elif isinstance(n, docutils.nodes.colspec): - colWidths.append(int(n['colwidth'])) - - # colWidths are in no specific unit, really. Maybe ems. 
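The conversion performed next normalizes those relative colspec widths to percentage strings that later get resolved against the frame width. A small standalone illustration of the same arithmetic; the function name is not from the source.

def colspec_to_percent(widths):
    """Normalize docutils colspec widths (arbitrary relative units) to
    percentage strings that sum to 100%."""
    total = float(sum(widths))
    return ['%s%%' % (100.0 * w / total) for w in widths]

# colspec_to_percent([1, 1, 2]) -> ['25.0%', '25.0%', '50.0%']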
- # Convert them to % - colWidths=map(int, colWidths) - tot=sum(colWidths) - colWidths=["%s%%"%((100.*w)/tot) for w in colWidths] - - if 'colWidths' in style.__dict__: - colWidths[:len(style.colWidths)]=style.colWidths - - spans = client.filltable(rows) - - data = [] - cellStyles = [] - rowids = range(0, len(rows)) - for row, i in zip(rows, rowids): - r = [] - j = 0 - for cell in row: - if isinstance(cell, str): - r.append("") - else: - if i < headRows: - st = client.styles['table-heading'] - else: - st = client.styles['table-body'] - ell = client.gather_elements(cell, style=st) - r.append(ell) - j += 1 - data.append(r) - - st = TableStyle(spans) - if 'commands' in style.__dict__: - for cmd in style.commands: - st.add(*cmd) - else: - # Only use the commands from "table" if the - # specified class has no commands. - - for cmd in client.styles['table'].commands: - st.add(*cmd) - - if hasHead: - for cmd in client.styles.tstyleHead(headRows): - st.add(*cmd) - rtr = client.repeat_table_rows - - t=DelayedTable(data, colWidths, st, rtr) - if style.alignment == TA_LEFT: - t.hAlign='LEFT' - elif style.alignment == TA_CENTER: - t.hAlign='CENTER' - elif style.alignment == TA_RIGHT: - t.hAlign='RIGHT' - return [t] - -class HandleParagraph(NodeHandler, docutils.nodes.paragraph): - def gather_elements(self, client, node, style): - return [Paragraph(client.gen_pdftext(node), style)] - - def get_pre_post(self, client, node, replaceEnt): - pre='' - targets=set(node.get('ids',[])+client.pending_targets) - client.pending_targets=[] - for _id in targets: - if _id not in client.targets: - pre+=''%(_id) - client.targets.append(_id) - return pre, '\n' - - -class HandleTitle(HandleParagraph, docutils.nodes.title): - def gather_elements(self, client, node, style): - # Special cases: (Not sure this is right ;-) - if isinstance(node.parent, docutils.nodes.document): - #node.elements = [Paragraph(client.gen_pdftext(node), - #client.styles['title'])] - # The visible output is now done by the cover template - node.elements = [] - client.doc_title = node.rawsource - client.doc_title_clean = node.astext().strip() - elif isinstance(node.parent, docutils.nodes.topic): - node.elements = [Paragraph(client.gen_pdftext(node), - client.styles['topic-title'])] - elif isinstance(node.parent, docutils.nodes.Admonition): - node.elements = [Paragraph(client.gen_pdftext(node), - client.styles['admonition-title'])] - elif isinstance(node.parent, docutils.nodes.table): - node.elements = [Paragraph(client.gen_pdftext(node), - client.styles['table-title'])] - elif isinstance(node.parent, docutils.nodes.sidebar): - node.elements = [Paragraph(client.gen_pdftext(node), - client.styles['sidebar-title'])] - else: - # Section/Subsection/etc. 
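The branch that follows builds a Heading flowable for section and subsection titles. A minimal sketch of the style lookup it performs, assuming stylesheet entries named heading1..headingN and a depth cap that depends on the ReportLab version (4 for 2.1, 6 for later releases); the helper name is illustrative.

def heading_style_name(depth, reportlab_version='2.5'):
    """Pick the stylesheet entry for a section title at the given depth."""
    maxdepth = 6 if reportlab_version > '2.1' else 4
    return 'heading%d' % min(depth, maxdepth)

# heading_style_name(8) -> 'heading6'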
- text = client.gen_pdftext(node) - fch = node.children[0] - if isinstance(fch, docutils.nodes.generated) and \ - fch['classes'] == ['sectnum']: - snum = fch.astext() - else: - snum = None - key = node.get('refid') - maxdepth=4 - if reportlab.Version > '2.1': - maxdepth=6 - - # The parent ID is the refid + an ID to make it unique for Sphinx - parent_id=(node.parent.get('ids', [None]) or [None])[0]+u'-'+unicode(id(node)) - node.elements = [ Heading(text, - client.styles['heading%d'%min(client.depth, maxdepth)], - level=client.depth-1, - parent_id=parent_id, - node=node, - section_header_depth=client.section_header_depth - )] - if client.depth <= client.breaklevel: - node.elements.insert(0, MyPageBreak(breakTo=client.breakside)) - return node.elements - -class HandleSubTitle(HandleParagraph, docutils.nodes.subtitle): - def gather_elements(self, client, node, style): - if isinstance(node.parent, docutils.nodes.sidebar): - elements = [Paragraph(client.gen_pdftext(node), - client.styles['sidebar-subtitle'])] - elif isinstance(node.parent, docutils.nodes.document): - #elements = [Paragraph(client.gen_pdftext(node), - #client.styles['subtitle'])] - # The visible output is now done by the cover template - elements = [] - # FIXME: looks like subtitles don't have a rawsource like - # titles do. - # That means that literals and italics etc in subtitles won't - # work. - client.doc_subtitle = getattr(node,'rawtext',node.astext()).strip() - else: - elements = node.elements # FIXME Can we get here??? - return elements - -class HandleDocInfo(NodeHandler, docutils.nodes.docinfo): - # A docinfo usually contains several fields. - # We'll render it as a series of elements, one field each. - pass - -class HandleField(NodeHandler, docutils.nodes.field): - def gather_elements(self, client, node, style): - # A field has two child elements, a field_name and a field_body. - # We render as a two-column table, left-column is right-aligned, - # bold, and much smaller - - fn = Paragraph(client.gather_pdftext(node.children[0]) + ":", - style=client.styles['fieldname']) - fb = client.gen_elements(node.children[1], - style=client.styles['fieldvalue']) - t_style=TableStyle(client.styles['field-list'].commands) - return [DelayedTable([[fn, fb]], style=t_style, - colWidths=client.styles['field-list'].colWidths)] - -class HandleDecoration(NodeHandler, docutils.nodes.decoration): - pass - -class HandleHeader(NodeHandler, docutils.nodes.header): - stylename = 'header' - def gather_elements(self, client, node, style): - client.decoration[self.stylename] = client.gather_elements(node, - style=client.styles[self.stylename]) - return [] - -class HandleFooter(HandleHeader, docutils.nodes.footer): - stylename = 'footer' - -class HandleAuthor(NodeHandler, docutils.nodes.author): - def gather_elements(self, client, node, style): - if isinstance(node.parent, docutils.nodes.authors): - # Is only one of multiple authors. 
Return a paragraph - node.elements = [Paragraph(client.gather_pdftext(node), - style=style)] - if client.doc_author: - client.doc_author += client.author_separator(style=style) \ - + node.astext().strip() - else: - client.doc_author = node.astext().strip() - else: - # A single author: works like a field - fb = client.gather_pdftext(node) - - t_style=TableStyle(client.styles['field-list'].commands) - colWidths=map(client.styles.adjustUnits, - client.styles['field-list'].colWidths) - - node.elements = [Table( - [[Paragraph(client.text_for_label("author", style)+":", - style=client.styles['fieldname']), - Paragraph(fb, style)]], - style=t_style, colWidths=colWidths)] - client.doc_author = node.astext().strip() - return node.elements - -class HandleAuthors(NodeHandler, docutils.nodes.authors): - def gather_elements(self, client, node, style): - # Multiple authors. Create a two-column table. - # Author references on the right. - t_style=TableStyle(client.styles['field-list'].commands) - colWidths = client.styles['field-list'].colWidths - - td = [[Paragraph(client.text_for_label("authors", style)+":", - style=client.styles['fieldname']), - client.gather_elements(node, style=style)]] - return [DelayedTable(td, style=t_style, - colWidths=colWidths)] - -class HandleFList(NodeHandler): - adjustwidths = False - TableType = DelayedTable - def gather_elements(self, client, node, style): - fb = client.gather_pdftext(node) - t_style=TableStyle(client.styles['field-list'].commands) - colWidths=client.styles['field-list'].colWidths - if self.adjustwidths: - colWidths = map(client.styles.adjustUnits, colWidths) - label=client.text_for_label(self.labeltext, style)+":" - t = self.TableType([[Paragraph(label, style=client.styles['fieldname']), - Paragraph(fb, style)]], - style=t_style, colWidths=colWidths) - return [t] - -class HandleOrganization(HandleFList, docutils.nodes.organization): - labeltext = "organization" - -class HandleContact(HandleFList, docutils.nodes.contact): - labeltext = "contact" - -class HandleAddress(HandleFList, docutils.nodes.address): - labeltext = "address" - def gather_elements(self, client, node, style): - fb = client.gather_pdftext(node) - t_style=TableStyle(client.styles['field-list'].commands) - colWidths=client.styles['field-list'].colWidths - if self.adjustwidths: - colWidths = map(client.styles.adjustUnits, colWidths) - label=client.text_for_label(self.labeltext, style)+":" - t = self.TableType([[Paragraph(label, style=client.styles['fieldname']), - XPreformatted(fb, style)] - ], style=t_style, colWidths=colWidths) - return [t] - -class HandleVersion(HandleFList, docutils.nodes.version): - labeltext = "version" - -class HandleRevision(HandleFList, docutils.nodes.revision): - labeltext = "revision" - adjustwidths = True - TableType = Table - -class HandleStatus(HandleFList, docutils.nodes.status): - labeltext = "status" - -class HandleDate(HandleFList, docutils.nodes.date): - labeltext = "date" - -class HandleCopyright(HandleFList, docutils.nodes.copyright): - labeltext = "copyright" - -class HandleTopic(NodeHandler, docutils.nodes.topic): - def gather_elements(self, client, node, style): - # toc - node_classes = node.attributes.get('classes', []) - cstyles = client.styles - if 'contents' in node_classes: - toc_visitor = TocBuilderVisitor(node.document) - if 'local' in node_classes: - toc_visitor.toc = MyTableOfContents(parent=node.parent) - else: - toc_visitor.toc = MyTableOfContents(parent=None) - toc_visitor.toc.linkColor = cstyles.tocColor or cstyles.linkColor - 
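The walk below collects reference ids and the resulting table of contents is later notified of each heading; only headings that pass the locality test (islocal in MyTableOfContents.notify earlier in this patch) are accepted by a "local" TOC. A minimal sketch of that test, with an illustrative name:

def is_local(node, toc_parent):
    """A heading belongs to a local TOC only if its title node sits inside
    the TOC's parent node; a global TOC (parent None) accepts everything."""
    if toc_parent is None:
        return True
    while node is not None:
        if node is toc_parent:
            return True
        node = getattr(node, 'parent', None)
    return False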
node.walk(toc_visitor) - toc = toc_visitor.toc - toc.levelStyles=[cstyles['toc%d'%l] for l in range(1,15)] - for s in toc.levelStyles: - # FIXME: awful slimy hack! - s.__class__=reportlab.lib.styles.ParagraphStyle - ## Issue 117: add extra TOC levelStyles. - ## 9-deep should be enough. - #for i in range(4): - #ps = toc.levelStyles[-1].__class__(name='Level%d'%(i+5), - #parent=toc.levelStyles[-1], - #leading=toc.levelStyles[-1].leading, - #firstlineIndent=toc.levelStyles[-1].firstLineIndent, - #leftIndent=toc.levelStyles[-1].leftIndent+1*cm) - #toc.levelStyles.append(ps) - - ## Override fontnames (defaults to Times-Roman) - #for levelStyle in toc.levelStyles: - #levelStyle.__dict__['fontName'] = \ - #client.styles['tableofcontents'].fontName - if 'local' in node_classes: - node.elements = [toc] - else: - node.elements = \ - [Paragraph(client.gen_pdftext(node.children[0]), - cstyles['heading1']), toc] - else: - node.elements = client.gather_elements(node, style=style) - return node.elements - -class HandleFieldBody(NodeHandler, docutils.nodes.field_body): - pass - -class HandleSection(NodeHandler, docutils.nodes.section): - def gather_elements(self, client, node, style): - #XXX: should style be passed down here? - client.depth+=1 - elements = client.gather_elements(node) - client.depth-=1 - return elements - -class HandleBulletList(NodeHandler, docutils.nodes.bullet_list): - def gather_elements(self, client, node, style): - - if node ['classes']: - style = client.styles[node['classes'][0]] - else: - style = client.styles["bullet-list"] - - node.elements = client.gather_elements(node, - style=style) - - # Here we need to separate the list from the previous element. - # Calculate by how much: - - sb=style.spaceBefore # list separation - sa=style.spaceAfter # list separation - - node.elements.insert(0, MySpacer(0, sb)) - node.elements.append(MySpacer(0, sa)) - return node.elements - -class HandleDefOrOptList(NodeHandler, docutils.nodes.definition_list, - docutils.nodes.option_list): - pass - -class HandleFieldList(NodeHandler, docutils.nodes.field_list): - def gather_elements(self, client, node, style): - return [MySpacer(0,client.styles['field-list'].spaceBefore)]+\ - client.gather_elements(node, style=style) - -class HandleEnumeratedList(NodeHandler, docutils.nodes.enumerated_list): - def gather_elements(self, client, node, style): - if node ['classes']: - style = client.styles[node['classes'][0]] - else: - style = client.styles["item-list"] - - node.elements = client.gather_elements(node, - style = style) - - # Here we need to separate the list from the previous element. 
- # Calculate by how much: - - sb=style.spaceBefore # list separation - sa=style.spaceAfter # list separation - - node.elements.insert(0, MySpacer(0, sb)) - node.elements.append(MySpacer(0, sa)) - return node.elements - -class HandleDefinition(NodeHandler, docutils.nodes.definition): - def gather_elements(self, client, node, style): - return client.gather_elements(node, - style = style) - -class HandleOptionListItem(NodeHandler, docutils.nodes.option_list_item): - def gather_elements(self, client, node, style): - optext = ', '.join([client.gather_pdftext(child) - for child in node.children[0].children]) - - desc = client.gather_elements(node.children[1], style) - - t_style = TableStyle(client.styles['option-list'].commands) - colWidths = client.styles['option-list'].colWidths - node.elements = [DelayedTable([[client.PreformattedFit( - optext, client.styles["literal"]), desc]], style = t_style, - colWidths = colWidths)] - return node.elements - -class HandleDefListItem(NodeHandler, docutils.nodes.definition_list_item): - def gather_elements(self, client, node, style): - # I need to catch the classifiers here - tt = [] - dt = [] - ids = [] - for n in node.children: - if isinstance(n, docutils.nodes.term): - for i in n['ids']: # Used by sphinx glossary lists - if i not in client.targets: - ids.append('' % i) - client.targets.append(i) - o, c = client.styleToTags("definition-list-term") - tt.append(o + client.gather_pdftext(n) + c) - elif isinstance(n, docutils.nodes.classifier): - o, c = client.styleToTags("definition-list-classifier") - tt.append(o + client.gather_pdftext(n) + c) - else: - dt.extend(client.gen_elements(n, style)) - - # FIXME: make this configurable from the stylesheet - t_style = TableStyle (client.styles['definition'].commands) - cw = getattr(client.styles['definition'],'colWidths',[]) - - if client.splittables: - node.elements = [ - Paragraph(''.join(ids)+' : '.join(tt), client.styles['definition-list-term']), - SplitTable([['',dt]] , colWidths=cw, style = t_style )] - else: - node.elements = [ - Paragraph(''.join(ids)+' : '.join(tt), client.styles['definition-list-term']), - DelayedTable([['',dt]] , colWidths=[10,None], style = t_style )] - - return node.elements - -class HandleListItem(NodeHandler, docutils.nodes.list_item): - def gather_elements(self, client, node, style): - b, t = client.bullet_for_node(node) - - bStyle = copy(style) - bStyle.alignment = 2 - - # FIXME: use different unicode bullets depending on b - if b and b in "*+-": - b = getattr(bStyle, 'bulletText', u'\u2022') - - # The style has information about the bullet: - # - # bulletFontSize - # bulletFont - # This is so the baselines of the bullet and the text align - extra_space= bStyle.bulletFontSize-bStyle.fontSize - - bStyle.fontSize=bStyle.bulletFontSize - bStyle.fontName=bStyle.bulletFontName - - if t == 'bullet': - item_st=client.styles['bullet-list-item'] - else: - item_st=client.styles['item-list-item'] - - el = client.gather_elements(node, item_st) - # FIXME: this is really really not good code - if not el: - el = [Paragraph(u"\xa0", item_st)] - - - idx=node.parent.children.index(node) - if idx==0: - # The first item in the list, so doesn't need - # separation (it's provided by the list itself) - sb=0 - # It also doesn't need a first-line-indent - fli=0 - else: - # Not the first item, so need to separate from - # previous item. Account for space provided by - # the item's content, too. 
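A minimal sketch of the spacing rule described above: the first item in a list relies on the spacer the list itself inserts, while later items add their style's spaceBefore minus the spaceAfter the previous item already contributed. Names are illustrative.

def item_space_before(index_in_list, style_space_before, style_space_after):
    """Vertical space to insert before a list item."""
    if index_in_list == 0:
        return 0
    return style_space_before - style_space_after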
- sb=item_st.spaceBefore-item_st.spaceAfter - fli=item_st.firstLineIndent - - bStyle.spaceBefore=0 - - t_style = TableStyle(style.commands) - # The -3 here is to compensate for padding, 0 doesn't work :-( - t_style._cmds.extend([ - #["GRID", [ 0, 0 ], [ -1, -1 ], .25, "black" ], - ["BOTTOMPADDING", [ 0, 0 ], [ -1, -1 ], -3 ]] - ) - if extra_space >0: - # The bullet is larger, move down the item text - sb += extra_space - sbb = 0 - else: - # The bullet is smaller, move down the bullet - sbb = -extra_space - - #colWidths = map(client.styles.adjustUnits, - #client.styles['item_list'].colWidths) - colWidths = getattr(style,'colWidths',[]) - while len(colWidths) < 2: - colWidths.append(client.styles['item_list'].colWidths[len(colWidths)]) - - if client.splittables: - node.elements = [MySpacer(0,sb), - SplitTable([[Paragraph(b, style = bStyle), el]], - style = t_style, - colWidths = colWidths) - ] - else: - node.elements = [MySpacer(0,sb), - DelayedTable([[Paragraph(b, style = bStyle), el]], - style = t_style, - colWidths = colWidths) - ] - return node.elements - -class HandleTransition(NodeHandler, docutils.nodes.transition): - def gather_elements(self, client, node, style): - return [Separation()] - - -class HandleBlockQuote(NodeHandler, docutils.nodes.block_quote): - def gather_elements(self, client, node, style): - # This should work, but doesn't look good inside of - # table cells (see Issue 173) - #node.elements = [MyIndenter(left=client.styles['blockquote'].leftIndent)]\ - #+ client.gather_elements( node, style) + \ - #[MyIndenter(left=-client.styles['blockquote'].leftIndent)] - # Workaround for Issue 173 using tables - leftIndent=client.styles['blockquote'].leftIndent - rightIndent=client.styles['blockquote'].rightIndent - spaceBefore=client.styles['blockquote'].spaceBefore - spaceAfter=client.styles['blockquote'].spaceAfter - s=copy(client.styles['blockquote']) - s.leftIndent=style.leftIndent - data=[['',client.gather_elements( node, s)]] - if client.splittables: - node.elements=[MySpacer(0,spaceBefore),SplitTable(data, - colWidths=[leftIndent,None], - style=TableStyle([["TOPPADDING",[0,0],[-1,-1],0], - ["LEFTPADDING",[0,0],[-1,-1],0], - ["RIGHTPADDING",[0,0],[-1,-1],rightIndent], - ["BOTTOMPADDING",[0,0],[-1,-1],0], - ])), MySpacer(0,spaceAfter)] - else: - node.elements=[MySpacer(0,spaceBefore),DelayedTable(data, - colWidths=[leftIndent,None], - style=TableStyle([["TOPPADDING",[0,0],[-1,-1],0], - ["LEFTPADDING",[0,0],[-1,-1],0], - ["RIGHTPADDING",[0,0],[-1,-1],rightIndent], - ["BOTTOMPADDING",[0,0],[-1,-1],0], - ])), MySpacer(0,spaceAfter)] - return node.elements - -class HandleAttribution(NodeHandler, docutils.nodes.attribution): - def gather_elements(self, client, node, style): - return [ - Paragraph(client.gather_pdftext(node), - client.styles['attribution'])] - -class HandleComment(NodeHandler, docutils.nodes.comment): - def gather_elements(self, client, node, style): - # Class that generates no output - return [] - -class HandleLineBlock(NodeHandler, docutils.nodes.line_block): - def gather_elements(self, client, node, style): - if isinstance(node.parent,docutils.nodes.line_block): - qstyle = copy(style) - qstyle.leftIndent += client.styles.adjustUnits("1.5em") - else: - qstyle = copy(client.styles['lineblock']) - # Fix Issue 225: no space betwen line in a lineblock, but keep - # space before the lineblock itself - # Fix Issue 482: nested lineblocks don't need spacing before/after - if not isinstance(node.parent, docutils.nodes.line_block): - return 
[MySpacer(0,client.styles['lineblock'].spaceBefore)]+client.gather_elements(node, style=qstyle)+[MySpacer(0,client.styles['lineblock'].spaceAfter)] - else: - return client.gather_elements(node, style=qstyle) - -class HandleLine(NodeHandler, docutils.nodes.line): - def gather_elements(self, client, node, style): - # line nodes have no classes, they have to inherit from the outermost lineblock (sigh) - # For more info see Issue 471 and its test case. - - parent = node - while isinstance(parent.parent, (docutils.nodes.line, docutils.nodes.line_block)): - parent=parent.parent - p_class = (parent.get('classes') or ['line'])[0] - qstyle = copy(client.styles[p_class]) - # Indent .5em per indent unit - i=node.__dict__.get('indent',0) - #qstyle = copy(client.styles['line']) - qstyle.leftIndent += client.styles.adjustUnits("0.5em")*i - text = client.gather_pdftext(node) - if not text: # empty line - text=u"\xa0" - return [Paragraph(text, style=qstyle)] - -class HandleLiteralBlock(NodeHandler, docutils.nodes.literal_block, - docutils.nodes.doctest_block): - def gather_elements(self, client, node, style): - if node['classes']: - style = client.styles.combinedStyle(['code']+node['classes']) - else: - style = client.styles['code'] - - return [client.PreformattedFit( - client.gather_pdftext(node, replaceEnt = True), - style )] - - -class HandleFigure(NodeHandler, docutils.nodes.figure): - def gather_elements(self, client, node, style): - - # Either use the figure style or the class - # selected by the user - st_name = 'figure' - if node.get('classes'): - st_name = node.get('classes')[0] - style=client.styles[st_name] - cmd=getattr(style,'commands',[]) - image=node.children[0] - if len(node.children) > 1: - caption = node.children[1] - else: - caption=None - - if len(node.children) > 2: - legend = node.children[2:] - else: - legend=[] - - w=node.get('width',client.styles['figure'].colWidths[0]) - cw=[w,] - sub_elems = client.gather_elements(node, style=None) - t_style=TableStyle(cmd) - table = DelayedTable([[e,] for e in sub_elems],style=t_style, - colWidths=cw) - table.hAlign = node.get('align','CENTER').upper() - return [MySpacer(0, style.spaceBefore),table, - MySpacer(0, style.spaceAfter)] - - -class HandleCaption(NodeHandler, docutils.nodes.caption): - def gather_elements(self, client, node, style): - return [Paragraph(client.gather_pdftext(node), - style=client.styles['figure-caption'])] - -class HandleLegend(NodeHandler, docutils.nodes.legend): - def gather_elements(self, client, node, style): - return client.gather_elements(node, - style=client.styles['figure-legend']) - -class HandleSidebar(NodeHandler, docutils.nodes.sidebar): - def gather_elements(self, client, node, style): - return [BoxedContainer(client.gather_elements(node, style=None), - client.styles['sidebar'])] - -class HandleRubric(NodeHandler, docutils.nodes.rubric): - def gather_elements(self, client, node, style): - # Sphinx uses a rubric as footnote container - if self.sphinxmode and len(node.children) == 1 \ - and node.children[0].astext() == 'Footnotes': - return [] - else: - return [Paragraph(client.gather_pdftext(node), - client.styles['rubric'])] - -class HandleCompound(NodeHandler, docutils.nodes.compound): - # FIXME think if this is even implementable - pass - -class HandleContainer(NodeHandler, docutils.nodes.container): - - def getelements(self, client, node, style): - parent = node.parent - if not isinstance(parent, (docutils.nodes.header, docutils.nodes.footer)): - elems = NodeHandler.getelements(self, client, node, 
style) - return [elems] - return self.gather_elements(client, node, style) - -class HandleSubstitutionDefinition(NodeHandler, docutils.nodes.substitution_definition): - def gather_elements(self, client, node, style): - return [] - -class HandleTBody(NodeHandler, docutils.nodes.tbody): - def gather_elements(self, client, node, style): - rows = [client.gen_elements(n) for n in node.children] - t = [] - for r in rows: - if not r: - continue - t.append(r) - t_style = TableStyle(client.styles['table'].commands) - colWidths = client.styles['table'].colWidths - return [DelayedTable(t, style=t_style, colWidths=colWidths)] - -class HandleFootnote(NodeHandler, docutils.nodes.footnote, - docutils.nodes.citation): - def gather_elements(self, client, node, style): - # It seems a footnote contains a label and a series of elements - ltext = client.gather_pdftext(node.children[0]) - label = None - ids='' - for i in node.get('ids',[]): - ids+=''%(i) - client.targets.extend(node.get('ids',[ltext])) - - if len(node['backrefs']) > 1 and client.footnote_backlinks: - backrefs = [] - i = 1 - for r in node['backrefs']: - backrefs.append('%d' % ( - r, client.styles.linkColor, i)) - i += 1 - backrefs = '(%s)' % ', '.join(backrefs) - if ltext not in client.targets: - label = Paragraph(ids+'%s'%(ltext + backrefs), - client.styles["endnote"]) - client.targets.append(ltext) - elif len(node['backrefs'])==1 and client.footnote_backlinks: - if ltext not in client.targets: - label = Paragraph(ids+'%s' % ( - node['backrefs'][0], - client.styles.linkColor, - ltext), client.styles["endnote"]) - client.targets.append(ltext) - else: - if ltext not in client.targets: - label = Paragraph(ids+ltext, - client.styles["endnote"]) - client.targets.append(ltext) - if not label: - label = Paragraph(ids+ltext, - client.styles["endnote"]) - contents = client.gather_elements(node, client.styles["endnote"])[1:] - if client.inline_footnotes: - st=client.styles['endnote'] - t_style = TableStyle(st.commands) - colWidths = client.styles['endnote'].colWidths - node.elements = [MySpacer(0, st.spaceBefore), - DelayedTable([[label, contents]], - style=t_style, colWidths=colWidths), - MySpacer(0, st.spaceAfter)] - if client.real_footnotes: - client.mustMultiBuild = True - for e in node.elements: - e.isFootnote=True - else: - client.decoration['endnotes'].append([label, contents]) - node.elements = [] - return node.elements - -class HandleLabel(NodeHandler, docutils.nodes.label): - def gather_elements(self, client, node, style): - return [Paragraph(client.gather_pdftext(node), style)] - -class HandleEntry(NodeHandler, docutils.nodes.entry): - pass - -class HandleRaw(NodeHandler, docutils.nodes.raw): - def gather_elements(self, client, node, style): - # Not really raw, but what the heck - if node.get('format','NONE').lower()=='pdf': - return parseRaw(str(node.astext()), node) - elif client.raw_html and node.get('format','NONE').lower()=='html': - x = parseHTML(str(node.astext()), node) - return x - else: - return [] - def get_text(self, client, node, replaceEnt=True): - return client.gather_pdftext(node, replaceEnt=False if client.raw_html and node.get('format','NONE').lower()=='html' else True) - -class HandleOddEven (NodeHandler, OddEvenNode): - def gather_elements(self, client, node, style): - odd=[] - even=[] - #from pudb import set_trace; set_trace() - if node.children: - if isinstance (node.children[0], docutils.nodes.paragraph): - if node.children[0].get('classes'): - s = client.styles[node.children[0].get('classes')[0]] - else: - s = style - 
odd=[Paragraph(client.gather_pdftext(node.children[0]), - s)] - else: - # A compound element - odd=client.gather_elements(node.children[0]) - if len(node.children)>1: - if isinstance (node.children[1], docutils.nodes.paragraph): - if node.children[1].get('classes'): - s = client.styles[node.children[1].get('classes')[0]] - else: - s = style - even=[Paragraph(client.gather_pdftext(node.children[1]), - s)] - else: - even=client.gather_elements(node.children[1]) - - return [OddEven(odd=odd, even=even)] - -class HandleAanode(NodeHandler, Aanode): - def gather_elements(self, client, node, style): - style_options = { - 'font': client.styles['aafigure'].fontName, - } - return [node.gen_flowable(style_options)] - -class HandleAdmonition(NodeHandler, docutils.nodes.attention, - docutils.nodes.caution, docutils.nodes.danger, - docutils.nodes.error, docutils.nodes.hint, - docutils.nodes.important, docutils.nodes.note, - docutils.nodes.tip, docutils.nodes.warning, - docutils.nodes.Admonition): - - def gather_elements(self, client, node, style): - if node.children and isinstance(node.children[0], docutils.nodes.title): - title=[] - else: - title= [Paragraph(client.text_for_label(node.tagname, style), - style=client.styles['%s-heading'%node.tagname])] - rows=title + client.gather_elements(node, style=style) - st=client.styles[node.tagname] - if 'commands' in dir(st): - t_style = TableStyle(st.commands) - else: - t_style = TableStyle() - t_style.add("ROWBACKGROUNDS", [0, 0], [-1, -1],[st.backColor]) - t_style.add("BOX", [ 0, 0 ], [ -1, -1 ], st.borderWidth , st.borderColor) - - if client.splittables: - node.elements = [MySpacer(0,st.spaceBefore), - SplitTable([['',rows]], - style=t_style, - colWidths=[0,None], - padding=st.borderPadding), - MySpacer(0,st.spaceAfter)] - else: - padding, p1, p2, p3, p4=tablepadding(padding=st.borderPadding) - t_style.add(*p1) - t_style.add(*p2) - t_style.add(*p3) - t_style.add(*p4) - node.elements = [MySpacer(0,st.spaceBefore), - DelayedTable([['',rows]], - style=t_style, - colWidths=[0,None]), - MySpacer(0,st.spaceAfter)] - return node.elements diff --git a/dist-packages/rst2pdf/rst2pdf/genpdftext.py b/dist-packages/rst2pdf/rst2pdf/genpdftext.py deleted file mode 100644 index 3bd62f69a..000000000 --- a/dist-packages/rst2pdf/rst2pdf/genpdftext.py +++ /dev/null @@ -1,235 +0,0 @@ -# -*- coding: utf-8 -*- - -#$URL$ -#$Date$ -#$Revision$ - -# See LICENSE.txt for licensing terms - -import os -from xml.sax.saxutils import escape -from log import log, nodeid -from basenodehandler import NodeHandler -import docutils.nodes -from urlparse import urljoin, urlparse -from reportlab.lib.units import cm -from opt_imports import Paragraph - -from image import MyImage, missing -from flowables import MySpacer - -class FontHandler(NodeHandler): - def get_pre_post(self, client, node, replaceEnt): - return self.get_font_prefix(client, node, replaceEnt), '
' - - def get_font_prefix(self, client, node, replaceEnt): - return client.styleToFont(self.fontstyle) - -class HandleText(NodeHandler, docutils.nodes.Text): - def gather_elements(self, client, node, style): - return [Paragraph(client.gather_pdftext(node), style)] - - def get_text(self, client, node, replaceEnt): - text = node.astext() - if replaceEnt: - text = escape(text) - return text - -class HandleStrong(NodeHandler, docutils.nodes.strong): - pre = "" - post = "" - -class HandleEmphasis(NodeHandler, docutils.nodes.emphasis): - pre = "" - post = "" - -class HandleLiteral(NodeHandler, docutils.nodes.literal): - def get_pre_post(self, client, node, replaceEnt): - - if node['classes']: - pre = client.styleToFont(node['classes'][0]) - else: - pre = client.styleToFont('literal') - post = "
" - if not client.styles['literal'].hyphenation: - pre = '' + pre - post += '' - return pre, post - - def get_text(self, client, node, replaceEnt): - text = node.astext() - text = escape(node.astext()) - text = text.replace(' ', ' ') - return text - -class HandleSuper(NodeHandler, docutils.nodes.superscript): - pre = '' - post = "" - -class HandleSub(NodeHandler, docutils.nodes.subscript): - pre = '' - post = "" - -class HandleTitleReference(FontHandler, docutils.nodes.title_reference): - fontstyle = 'title_reference' - -class HandleReference(NodeHandler, docutils.nodes.reference): - def get_pre_post(self, client, node, replaceEnt): - pre, post = '', '' - uri = node.get('refuri') - if uri: - # Issue 366: links to "#" make no sense in a PDF - if uri =="#": - return "", "" - if uri.startswith ('#'): - pass - elif client.baseurl: # Need to join the uri with the base url - uri = urljoin(client.baseurl, uri) - - if urlparse(uri)[0] and client.inlinelinks: - # external inline reference - if uri in [node.astext(),"mailto:"+node.astext()]: - # No point on repeating it - post = u'' - elif uri.startswith('http://') or uri.startswith('ftp://'): - post = u' (%s)' % uri - elif uri.startswith('mailto:'): - #No point on showing "mailto:" - post = u' (%s)' % uri[7:] - else: - # A plain old link - pre += u'' %\ - (uri, client.styles.linkColor) - post = '' + post - else: - uri = node.get('refid') - if uri: - pre += u'' %\ - (uri, client.styles.linkColor) - post = '' + post - return pre, post - -class HandleOptions(HandleText, docutils.nodes.option_string, docutils.nodes.option_argument): - pass - -class HandleSysMessage(HandleText, docutils.nodes.system_message, docutils.nodes.problematic): - pre = '' - post = "" - - def gather_elements(self, client, node, style): - # FIXME show the error in the document, red, whatever - # log.warning("Problematic node %s", node.astext()) - return [] - - -class HandleGenerated(HandleText, docutils.nodes.generated): - pass -# def get_text(self, client, node, replaceEnt): -# if 'sectnum' in node['classes']: -# # This is the child of a title with a section number -# # Send the section number up to the title node -# node.parent['_sectnum'] = node.astext() -# return node.astext() - -class HandleImage(NodeHandler, docutils.nodes.image): - def gather_elements(self, client, node, style): - # FIXME: handle alt - - target = None - if isinstance(node.parent, docutils.nodes.reference): - target = node.parent.get('refuri', None) - st_name = 'image' - if node.get('classes'): - st_name = node.get('classes')[0] - style=client.styles[st_name] - uri = str(node.get("uri")) - if uri.split("://")[0].lower() not in ('http','ftp','https'): - imgname = os.path.join(client.basedir,uri) - else: - imgname = uri - try: - w, h, kind = MyImage.size_for_node(node, client=client) - except ValueError: - # Broken image, return arbitrary stuff - imgname=missing - w, h, kind = 100, 100, 'direct' - node.elements = [ - MyImage(filename=imgname, height=h, width=w, - kind=kind, client=client, target=target)] - alignment = node.get('align', '').upper() - if not alignment: - # There is no JUSTIFY for flowables, of course, so 4:LEFT - alignment = {0:'LEFT', 1:'CENTER', 2:'RIGHT', 4:'LEFT'}[style.alignment] - if not alignment: - alignment = 'CENTER' - node.elements[0].image.hAlign = alignment - node.elements[0].spaceBefore = style.spaceBefore - node.elements[0].spaceAfter = style.spaceAfter - - # Image flowables don't support valign (makes no sense for them?) 
- # elif alignment in ('TOP','MIDDLE','BOTTOM'): - # i.vAlign = alignment - return node.elements - - def get_text(self, client, node, replaceEnt): - # First see if the image file exists, or else, - # use image-missing.png - imgname = os.path.join(client.basedir,str(node.get("uri"))) - try: - w, h, kind = MyImage.size_for_node(node, client=client) - except ValueError: - # Broken image, return arbitrary stuff - imgname=missing - w, h, kind = 100, 100, 'direct' - - alignment=node.get('align', 'CENTER').lower() - if alignment in ('top', 'middle', 'bottom'): - align='valign="%s"'%alignment - else: - align='' - # TODO: inline images don't support SVG, vectors and PDF, - # which may be surprising. So, work on converting them - # previous to passing to reportlab. - # Try to rasterize using the backend - w, h, kind = MyImage.size_for_node(node, client=client) - uri=MyImage.raster(imgname, client) - return ''%\ - (uri, w, h, align) - -class HandleFootRef(NodeHandler, docutils.nodes.footnote_reference,docutils.nodes.citation_reference): - def get_text(self, client, node, replaceEnt): - # TODO: when used in Sphinx, all footnotes are autonumbered - anchors='' - for i in node.get('ids'): - if i not in client.targets: - anchors+='' % i - client.targets.append(i) - return u'%s%s'%\ - (anchors, '#' + node.get('refid',node.astext()), - client.styles.linkColor, node.astext()) - -class HandleTarget(NodeHandler, docutils.nodes.target): - def gather_elements(self, client, node, style): - if 'refid' in node: - client.pending_targets.append(node['refid']) - return client.gather_elements(node, style) - - def get_text(self, client, node, replaceEnt): - text = client.gather_pdftext(node) - if replaceEnt: - text = escape(text) - return text - - def get_pre_post(self, client, node, replaceEnt): - pre = '' - if node['ids'][0] not in client.targets: - pre = u'' % node['ids'][0] - client.targets.append(node['ids'][0]) - return pre, '' - -class HandleInline(NodeHandler, docutils.nodes.inline): - def get_pre_post(self, client, node, replaceEnt): - r = client.styleToTags(node['classes'][0]) - if r: - return r - return '', '' diff --git a/dist-packages/rst2pdf/rst2pdf/image.py b/dist-packages/rst2pdf/rst2pdf/image.py deleted file mode 100644 index 7a875f082..000000000 --- a/dist-packages/rst2pdf/rst2pdf/image.py +++ /dev/null @@ -1,491 +0,0 @@ -# -*- coding: utf-8 -*- - -import os -from os.path import abspath, dirname, expanduser, join -import sys -import tempfile -from copy import copy -from reportlab.platypus.flowables import Image, Flowable -from log import log, nodeid -from reportlab.lib.units import * -import glob -import urllib - -from opt_imports import LazyImports - -from svgimage import SVGImage - -# This assignment could be overridden by an extension module -VectorPdf = None - -# find base path -if hasattr(sys, 'frozen'): - PATH = abspath(dirname(sys.executable)) -else: - PATH = abspath(dirname(__file__)) - -missing = os.path.join(PATH, 'images', 'image-missing.jpg') - -def defaultimage(filename, width=None, height=None, kind='direct', - mask="auto", lazy=1, srcinfo=None): - ''' We have multiple image backends, including the stock reportlab one. - This wrapper around the reportlab one allows us to pass the client - RstToPdf object and the uri into all our backends, which they can - use or not as necessary. - ''' - return Image(filename, width, height, kind, mask, lazy) - -class MyImage (Flowable): - """A Image subclass that can: - - 1. Take a 'percentage_of_container' kind, - which resizes it on wrap() to use... 
well, a percentage of the - container's width. - - 2. Take vector formats and instantiates the right "backend" flowable - - """ - - warned = False - - @classmethod - def support_warning(cls): - if cls.warned or LazyImports.PILImage: - return - cls.warned = True - log.warning("Support for images other than JPG," - " is now limited. Please install PIL.") - - @staticmethod - def split_uri(uri): - ''' A really minimalistic split -- doesn't cope with http:, etc. - HOWEVER, it tries to do so in a fashion that allows a clueless - user to have '#' inside his filename without screwing anything - up. - ''' - basename, extra = os.path.splitext(uri) - extra = extra.split('#', 1) + [''] - fname = basename + extra[0] - extension = extra[0][1:].lower() - options = extra[1] - return fname, extension, options - - def __init__(self, filename, width=None, height=None, - kind='direct', mask="auto", lazy=1, client=None, target=None): - # Client is mandatory. Perhaps move it farther up if we refactor - assert client is not None - self.__kind=kind - - if filename.split("://")[0].lower() in ('http','ftp','https'): - try: - filename2, _ = urllib.urlretrieve(filename) - if filename != filename2: - client.to_unlink.append(filename2) - filename = filename2 - except IOError: - filename = missing - self.filename, self._backend=self.get_backend(filename, client) - srcinfo = client, self.filename - - if kind == 'percentage_of_container': - self.image=self._backend(self.filename, width, height, - 'direct', mask, lazy, srcinfo) - self.image.drawWidth=width - self.image.drawHeight=height - self.__width=width - self.__height=height - else: - self.image=self._backend(self.filename, width, height, - kind, mask, lazy, srcinfo) - self.__ratio=float(self.image.imageWidth)/self.image.imageHeight - self.__wrappedonce=False - self.target = target - - @classmethod - def raster(self, filename, client): - """Takes a filename and converts it to a raster image - reportlab can process""" - - if not os.path.exists(filename): - log.error("Missing image file: %s",filename) - return missing - - try: - # First try to rasterize using the suggested backend - backend = self.get_backend(filename, client)[1] - return backend.raster(filename, client) - except: - pass - - # Last resort: try everything - - - PILImage = LazyImports.PILImage - - if PILImage: - ext='.png' - else: - ext='.jpg' - - extension = os.path.splitext(filename)[-1][1:].lower() - - if PILImage: # See if pil can process it - try: - PILImage.open(filename) - return filename - except: - # Can't read it - pass - - # PIL can't or isn't here, so try with Magick - - PMImage = LazyImports.PMImage - if PMImage: - try: - img = PMImage() - # Adjust density to pixels/cm - dpi=client.styles.def_dpi - img.density("%sx%s"%(dpi,dpi)) - img.read(str(filename)) - _, tmpname = tempfile.mkstemp(suffix=ext) - img.write(tmpname) - client.to_unlink.append(tmpname) - return tmpname - except: - # Magick couldn't - pass - elif PILImage: - # Try to use gfx, which produces PNGs, and then - # pass them through PIL. 
- # This only really matters for PDFs but it's worth trying - gfx = LazyImports.gfx - try: - # Need to convert the DPI to % where 100% is 72DPI - gfx.setparameter( "zoom", str(client.styles.def_dpi/.72)) - if extension == 'pdf': - doc = gfx.open("pdf", filename) - elif extension == 'swf': - doc = gfx.open("swf", filename) - else: - doc = None - if doc: - img = gfx.ImageList() - img.setparameter("antialise", "1") # turn on antialising - page = doc.getPage(1) - img.startpage(page.width,page.height) - page.render(img) - img.endpage() - _, tmpname = tempfile.mkstemp(suffix='.png') - img.save(tmpname) - client.to_unlink.append(tmpname) - return tmpname - except: # Didn't work - pass - - # PIL can't and Magick can't, so we can't - self.support_warning() - log.error("Couldn't load image [%s]"%filename) - return missing - - - @classmethod - def get_backend(self, uri, client): - '''Given the filename of an image, returns (fname, backend) - where fname is the filename to be used (could be the same as - filename, or something different if the image had to be converted - or is missing), and backend is an Image class that can handle - fname. - - - If uri ensd with '.*' then the returned filename will be the best - quality supported at the moment. - - That means: PDF > SVG > anything else - - ''' - - backend = defaultimage - - - # Extract all the information from the URI - filename, extension, options = self.split_uri(uri) - - if '*' in filename: - preferred=['gif','jpg','png'] - if SVGImage.available(): - preferred.append('svg') - preferred.append('pdf') - - # Find out what images are available - available = glob.glob(filename) - cfn=available[0] - cv=-10 - for fn in available: - ext=fn.split('.')[-1] - if ext in preferred: - v=preferred.index(ext) - else: - v=-1 - if v > cv: - cv=v - cfn=fn - # cfn should have our favourite type of - # those available - filename = cfn - extension = cfn.split('.')[-1] - uri = filename - - # If the image doesn't exist, we use a 'missing' image - if not os.path.exists(filename): - log.error("Missing image file: %s",filename) - filename = missing - - if extension in ['svg','svgz']: - if SVGImage.available(): - log.info('Backend for %s is SVGIMage'%filename) - backend=SVGImage - else: - filename = missing - - elif extension in ['pdf']: - if VectorPdf is not None and filename is not missing: - backend = VectorPdf - filename = uri - - # PDF images are implemented by converting via PythonMagick - # w,h are in pixels. I need to set the density - # of the image to the right dpi so this - # looks decent - elif LazyImports.PMImage or LazyImports.gfx: - filename=self.raster(filename, client) - else: - log.warning("Minimal PDF image support "\ - "requires PythonMagick or the vectorpdf extension [%s]", filename) - filename = missing - elif extension != 'jpg' and not LazyImports.PILImage: - if LazyImports.PMImage: - # Need to convert to JPG via PythonMagick - filename=self.raster(filename, client) - else: - # No way to make this work - log.error('To use a %s image you need PIL installed [%s]',extension,filename) - filename=missing - return filename, backend - - - @classmethod - def size_for_node(self, node, client): - '''Given a docutils image node, returns the size the image should have - in the PDF document, and what 'kind' of size that is. 
- That involves lots of guesswork''' - - uri = str(node.get("uri")) - if uri.split("://")[0].lower() not in ('http','ftp','https'): - uri = os.path.join(client.basedir,uri) - else: - uri, _ = urllib.urlretrieve(uri) - client.to_unlink.append(uri) - - srcinfo = client, uri - # Extract all the information from the URI - imgname, extension, options = self.split_uri(uri) - - if not os.path.isfile(imgname): - imgname = missing - - scale = float(node.get('scale', 100))/100 - size_known = False - - # Figuring out the size to display of an image is ... annoying. - # If the user provides a size with a unit, it's simple, adjustUnits - # will return it in points and we're done. - - # However, often the unit wil be "%" (specially if it's meant for - # HTML originally. In which case, we will use a percentage of - # the containing frame. - - # Find the image size in pixels: - kind = 'direct' - xdpi, ydpi = client.styles.def_dpi, client.styles.def_dpi - extension = imgname.split('.')[-1].lower() - if extension in ['svg','svgz'] and SVGImage.available(): - iw, ih = SVGImage(imgname, srcinfo=srcinfo).wrap(0, 0) - # These are in pt, so convert to px - iw = iw * xdpi / 72 - ih = ih * ydpi / 72 - - elif extension == 'pdf': - if VectorPdf is not None: - xobj = VectorPdf.load_xobj(srcinfo) - iw, ih = xobj.w, xobj.h - else: - pdf = LazyImports.pdfinfo - if pdf is None: - log.warning('PDF images are not supported without pyPdf or pdfrw [%s]', nodeid(node)) - return 0, 0, 'direct' - reader = pdf.PdfFileReader(open(imgname, 'rb')) - box = [float(x) for x in reader.getPage(0)['/MediaBox']] - iw, ih = x2 - x1, y2 - y1 - # These are in pt, so convert to px - iw = iw * xdpi / 72.0 - ih = ih * ydpi / 72.0 - size_known = True # Assume size from original PDF is OK - - else: - keeptrying = True - if LazyImports.PILImage: - try: - img = LazyImports.PILImage.open(imgname) - img.load() - iw, ih = img.size - xdpi, ydpi = img.info.get('dpi', (xdpi, ydpi)) - keeptrying = False - except IOError: # PIL throws this when it's a broken/unknown image - pass - if keeptrying and LazyImports.PMImage: - img = LazyImports.PMImage(imgname) - iw = img.size().width() - ih = img.size().height() - density=img.density() - # The density is in pixelspercentimeter (!?) - xdpi=density.width()*2.54 - ydpi=density.height()*2.54 - keeptrying = False - if keeptrying: - if extension not in ['jpg', 'jpeg']: - log.error("The image (%s, %s) is broken or in an unknown format" - , imgname, nodeid(node)) - raise ValueError - else: - # Can be handled by reportlab - log.warning("Can't figure out size of the image (%s, %s). Install PIL for better results." - , imgname, nodeid(node)) - iw = 1000 - ih = 1000 - - # Try to get the print resolution from the image itself via PIL. - # If it fails, assume a DPI of 300, which is pretty much made up, - # and then a 100% size would be iw*inch/300, so we pass - # that as the second parameter to adjustUnits - # - # Some say the default DPI should be 72. That would mean - # the largest printable image in A4 paper would be something - # like 480x640. That would be awful. - # - - w = node.get('width') - h = node.get('height') - if h is None and w is None: # Nothing specified - # Guess from iw, ih - log.debug("Using image %s without specifying size." 
- "Calculating based on image size at %ddpi [%s]", - imgname, xdpi, nodeid(node)) - w = iw*inch/xdpi - h = ih*inch/ydpi - elif w is not None: - # Node specifies only w - # In this particular case, we want the default unit - # to be pixels so we work like rst2html - if w[-1] == '%': - kind = 'percentage_of_container' - w=int(w[:-1]) - else: - # This uses default DPI setting because we - # are not using the image's "natural size" - # this is what LaTeX does, according to the - # docutils mailing list discussion - w = client.styles.adjustUnits(w, client.styles.tw, - default_unit='px') - - if h is None: - # h is set from w with right aspect ratio - h = w*ih/iw - else: - h = client.styles.adjustUnits(h, ih*inch/ydpi, default_unit='px') - elif h is not None and w is None: - if h[-1] != '%': - h = client.styles.adjustUnits(h, ih*inch/ydpi, default_unit='px') - - # w is set from h with right aspect ratio - w = h*iw/ih - else: - log.error('Setting height as a percentage does **not** work. '\ - 'ignoring height parameter [%s]', nodeid(node)) - # Set both from image data - w = iw*inch/xdpi - h = ih*inch/ydpi - - # Apply scale factor - w = w*scale - h = h*scale - - # And now we have this probably completely bogus size! - log.info("Image %s size calculated: %fcm by %fcm [%s]", - imgname, w/cm, h/cm, nodeid(node)) - - return w, h, kind - - def _restrictSize(self,aW,aH): - return self.image._restrictSize(aW, aH) - - def _unRestrictSize(self,aW,aH): - return self.image._unRestrictSize(aW, aH) - - def _unRestrictSize(self): - return self.image._unRestrictSize() - - def __deepcopy__(self, *whatever): - # ImageCore class is not deep copyable. Stop the copy at this - # class. If you remove this, re-test for issue #126. - return copy(self) - - def wrap(self, availWidth, availHeight): - if self.__kind=='percentage_of_container': - w, h= self.__width, self.__height - if not w: - log.warning('Scaling image as % of container with w unset.' - 'This should not happen, setting to 100') - w = 100 - scale=w/100. - w = availWidth*scale - h = w/self.__ratio - self.image.drawWidth, self.image.drawHeight = w, h - return w, h - else: - if self.image.drawHeight > availHeight: - if not getattr(self, '_atTop', True): - return self.image.wrap(availWidth, availHeight) - else: - # It's the first thing in the frame, probably - # Wrapping it will not make it work, so we - # adjust by height - # FIXME get rst file info (line number) - # here for better error message - log.warning('image %s is too tall for the '\ - 'frame, rescaling'%\ - self.filename) - self.image.drawHeight = availHeight - self.image.drawWidth = availHeight*self.__ratio - elif self.image.drawWidth > availWidth: - log.warning('image %s is too wide for the frame, rescaling'%\ - self.filename) - self.image.drawWidth = availWidth - self.image.drawHeight = availWidth / self.__ratio - return self.image.wrap(availWidth, availHeight) - - def drawOn(self, canv, x, y, _sW=0): - if self.target: - offset = 0 - if self.image.hAlign == 'CENTER': - offset = _sW / 2. 
- elif self.image.hAlign == 'RIGHT': - offset = _sW - canv.linkURL(self.target, - ( - x + offset, y, - x + offset + self.image.drawWidth, - y + self.image.drawHeight), - relative = True, - #thickness = 3, - ) - return self.image.drawOn(canv, x, y, _sW) diff --git a/dist-packages/rst2pdf/rst2pdf/images/image-missing.jpg b/dist-packages/rst2pdf/rst2pdf/images/image-missing.jpg deleted file mode 100644 index 447a5bf53385f61a86844fbd87d4da3f15193e1c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5101 zcmb7HcTiK?*G)+1(n6CW(tGbnFA9VnLJdLb9R&nL0i_E{PiRu4gboTMv)GE-+l6p{1v#rlw<}qi0}bWMZVb0AYqO zGM{tC-y*=@Jwafq^Nx(P)U@Zd{}E@s046HH1Hc0ikRL$C1Ozbw&jtZJ000m~4g>=J z1Py?U8U!Y%q@wt3w_*eU$-rb_3R*gHa_ZlWfMg&5n4E%=i3-9e$E>Pr6^^53;oz6I za|x^RM3OL?Sr#&N~c-bLNc@2UEQYk6!i^a;>w!)*3Kol&%K@hiNCo7P*4KN z$<8NtZdGCW?GOZ_rUZljH>UH}ATSdjIfWcVH;9E_-pVB=vu}@*nN1KDT*~SiJ0;Mx zcD4w(aIQ$k1Y!bc1JJ6E=|?C%Xif&W4eM5&(;8tFTkkltX8yp;E|df@FSM>>HkX9_ zt73uvUCw$l{bD4rVrAT;?C+f8l6R&zM2y%O>|cNpcAe=}Ug_G`yif78e^KUKO--%* zAE>g>LyUAifSVUUomd%GdiXwlr|#!!lMTf(DN0=<;#k4(s}b@qT1c9(_@#6P*tu9M zyd9l)1~|F^sinPFeH?b@;LtPuW5yqU(#$Jmz9H{#-nSfxCv6jpPqwIS$S?h+^f%F> zOa+z0s-7Q7)X6U@y1x-1wbbf=%KTkXy{Jj)FgN-~diVdnu{OHwp_k1kd7>Xr{mS|q z6$x(@?#cN4Mh@oiBU|>B^(RK#3|$9-NH%!jSM3u~yA=^vO%bxglwh=)%JsRNGNhC; zA?{J}_%O4K>b$6jlc=ai)+l2mlN4|1%oQhtlHOVGqL(EU$2x7DLl1>i`7M5Pb@4KrHZ{0TEu!xJ| zMJ&4xLJH|TlVAJn-K?O!S8pF{9XtB3<7OGJQX4)4q&#y-Pd2qFu;_nt%HMq}D^FTX zNN!cmi%YVeuUxz)7!FIQ(QUn`BN4jI5F5{@Rs+@OjYhb*g-K1x-o+Po)Q(xfm}>{5 zQoVRW4F&nA-@;HjJW+orq={4(3`KWs39sw-dv4^-I$=BK8pk;j0)0x`Wn}2aH@7JC zHGV2&7nbCWwVzTt>~oPyWIPU1vUzUg^!Z?LLC1*p=^GE((!%oIe6d;<< zTrAn5KAN>Gu^L*BG=dqI1OiF*X#eF_=^BM*Y!$1Vx~6TtsYJii&tueLy4dQ284$ix z$wqvAklUuN`{wF6UB~QvNKAg?1Vk|3P8sX#(WZ{<-3e&kh#CF5*od&c>1cD zRlTQ3D+e{BHX1Sx05vP5VU>e(hq0M_SUBWiLgW+GncYCc~AN|Ev`t~iR= ztPxO`yoZ+y*sj|~U2$#l+py_Rkd4Kv*n~1SK-8!#%_omq6{B(=+OgV}!pi12vI>5o zw#j@+Z;NN5-0s3yfa2U3wRB~|lq+GZ{oW|HNSf+sd8NbH_(pq|NIo*J!F!u*POg`W zjJmxC6?9@$n<;sDn2Vjm+iX_Kw=Qril-mdSAyx9G9x)7~pO@y=9>>Ma&;RXfL)bz` z!Jvjl-Yy~yscQ`zu`O4sOeBsy^BCZw_nrp{fxUChC8W$~+2 z6(rb^MU4eb#_+_`O>_Z25uDQW_14d{rY)6QmW4yP=8Ogr!ASC#xieMoLnQ;IZauw4 z(bt>s{OE!j_TDPm14Fh~(X~lDcC5DJ?G&MAj_sGdZ{G z3&xhWUs(H`9{Ltg zO?*szMg-|CPWL~j+gO%lW1wp4S9TCS&y59#7ZdbgXwl7&A8GI~5JaW7H~JUA$(dhh1*YwhkUDvG?l0X-QU4%NWtuDKJu`v)8Y@RDRE}tuzD6{~i zQ=FgICj1D`YpZt(^(&~~Jp3?WWWO|Eqn{9}KbhU} z-%08>{GN*HjQpc)z_gGZx954SH@^*o$Uvi`7NJML8R#V!KSE3aIxBxtvr68a=%%w) zDKxb$2f=5T)`kxI%o#jM3?(IIG0T=puQL{orod@JU@67`I;%?qflzOytqQhpN%rYT z6WF(2g1w^Ab9;TKx%rg@H18aA=IN4InM{xObXFD%eZNAIkmlaf7#lXpU`XEPb1~84 zEqgLZZadkrB4gnnn-AwWbNlq_8zZ)F_D$d_F!Udr&v{DtxV>yPZ2&2_zw9QJWQI8 zUJN8+oBPUW87B=9cy_{V|HvFOi9GZj(#3LQOCkKF9K>9m3aWbQ9(>b!aC zD*A&Mr>y1RwksIXdYf7He-$6-(hf$)NCIoUCQoealFMEVW?A#ld-7FZa5p&us zPoZ&YvK!%iPpRcv$~Zi%>hvX~kC%D%L;5)SiwxhFM1(ts+vHXnIhMz8D2ylf?hVs* zZo=C~Nk@i4yAk26GTN|QXOrrBZnTVa>FMXpK)O?^g|!fgwqH}$QJgiovv?_Nd`m+u zMQ;X+X@vU=1)KZ;DtEe=%8NdlJIwo*@#%+dw^xOMujj`@PCr}Taqx|-i|JhUXllN` z4IPsRF6R9l$W$2p2` zDoy02v3Nw(bJFz>Sf66K@axg|p*NzNyG=gI`|!=qE#J?8uu{qAmDwfAetCe1bj6@+ zX>hNdymG(i12jPvPN-eW%zQp{%~6T0$&Eb1d&-oV$2Q&KxdAq9_WAWJDl)6ew#!4j_Rr*PD>hfkZ4#}|0L(HYb~#sH#>^TM9`_{=acv;nlmYYh3%W?Q zj}@(7q4drB#sRZPi`b1o!2~|HZNWHf>fpm!S9?E^1xxQQrTd!2u{jtVaO}T!J3i6I(+Rv4;nmA9pK4U#y%ejSeJw!5+m^h+0!!ZQ zM|42lj5E%v5gW{4h=(9(#tpVYldS}^j0wZZLs8%$$GInWU1OxA?pa$Z_*gSv!r166 z*E&OKWj3{}lXW)WX|aWZ=IS8YoTj~2Gm_CtnC;C@shB{DrE*~Ipt>ZoZjT6GotU?V;=f8^@% zEuN2IE*(B5fuCwR*JJhlEemsuC&PPsVDY@!n-1QuE^ZQYb_L_*a z<=Xnl6%L~dO)Y3_{l2Hlu-o$E^%olh&;DhL{8W1uZV{fI~hk#t0DQm4? 
zt_cnvE~8Z}db_$f?}i&V`$Hch7K_zpBag*mwK<7BkiEoG-Fn4Mc5jHh_dZ#cNhXs- zBGKx!xVl~g|2Mz+&9A=x^{@W|$hp90UGPh;3ou&s{l||0$0+~JuKORds;ZPqrP|W5 zilR^$9cB0W_3VA_ISvgCA&3sWJxRJc6Lh9yw5P(Pl3`Nu0J0oFw&uF*0wwfm$eFsw&tvav(q?98qNk^a{{DV| zNyPDv`}Xa7^%I}?#J4MzO4cX+IiToTzV3oQ>rMjmKQgG*^Lc6|ctfiKfdJ8Hlw>kl z8>OSFDvF}evvzG$IreAQyzzP7a4lJu357yLB9YosGoH0Kt~TQB{wkuPl*#7{7>0hd zH4}@~K=1DE#&Mh}LHCY>gM$Y?^O?_lw^FI(U5n2H1=r^v6ZrF53phUA3WRABN8WNQ zkw}DiJYMs|o&@`M|I8d7@ZSIM2;e_9&NDn!XAQ%s`fXKJ(RGaveBc93@7Z$|)9qtp zW1~0Sbkm*LY<9x6_*^sDpGyk(gwz6#2SNN+?2(~ahl|FPtOu@tNts6X=vR$q1h6a{ z+qTg4st=!vu$E;_YxR0{ncV!Xx8C~gg9i@|H(358@SS5Q4KBJ`fG|&=X*R32GsSwq zjj#Q^bsEXi?EL8NMe|kV@j9N&1b*XQDwWiK`lo;T&i3uw54ym6mS1$i*S(~VbBuLi z`8b;Ay!!uRaUqSQn5sZC6;T?`?S|e&RX1Z1kH=e*eLo>oRW-l={qNtiVZ(+!KFePa z@L?f4h0@||Aq%Jst(I7u0pIGm<2X3>Q5SPG$h1hMQj@@+PWm-Xvw!^KAOGhsfBDO8 zKFiPfz}Myz{COk5;#mf9_N18~5**FyS(b(22AaLSy|vh3rhre^b$<1$Up;u=efK@v zMD`UQ_?9z|g9Aj~-li?b&g}cEa|#}nf2)@J@9F8O4JMh%`k#5`nTPMZ^UlZIKri0} zd}BT>e_;r4j4~~pz@Tv|zdth}oLYgWMgqg2t*xzA!JBE{x^?S@TW`Jfzg_S>+4r~g zJ@Ds!MoIClYVoZM#+(ZL#`hh~>suIRHS5>k-#??(>$=XXufDqZrkifM&j)^EqQ_h) z@E4Wo5&SJ%wru$Hr$2qSWm)-#M33L{t%U;r z_$k0KHUSsvGc)zan~oSx&H5c|r@FkIX&9J>L04B7q9{%)`_DY{%&-3L@BZ#y%d&F5 z{C=bCA5)fZapeI{k^pcz7trtq@1zz3n5Kncn5K5ADi!F2*{*Tsis!1s;c?G0}uR%+th33^F8p*;{pEL3eZ9> zVCn$!Xm>8J`Da@;U0q#GGkm2|N&VjUzIXRS4?VQC0sLZv>>oGq-;xAqR^*v&a9B?U zFZ*ZVI5ur(<@*^wUqj)+D_<$>%%AasK$X6=0s!!?x|(IEKcO?k%_6 z@-N%AZ9C8ee1DSXxP!l-1Q6ar;CmHBZ|xw@`VSvIeBkq+|NI>X4jeerAp4~z8NL&; zY{-HUzw|DQ}T{qr%<6W6dW`d)+{Q}JkJ11<-n73WPJb>@@Ci1L* z+qP}befG1T{SUVfpC5d$ndg;sE$2k87c+}Q&@s(IaVmk`?6oM0wVu3BJn_W(Pk!=~ z->XzA6TSpbQ=fmL!GB9Cz-*qme<0@M&w1cWR4PSuU3VUM;DLMZzWeUq`r7u*I&jWw zJ^w@!pk)PE?D0S&(9<;)k9IY4YD2bkUANb*Tldeu_{A@_Gr64G*vwlr;Lnu+je}^B zBniv1PU*aX|LEU~#iD-WjW>Svx#ymHv%%^;$*+}j7aRCHa7UsI8oNGW+wn zY}>bQfAvEj`p{kb_U#*WK`%4}dndGw-_mnvab%D{b$ERUhyiUthkJ~<*Cpn+8{nc0 zA>5;91iEYGCD-CT$@e0C&+-=~__I|2$7QnN%39IARsn)c9&&%G5}-CEkqffwf?oDT z`I^t)+l%=JwL}2NaU4MqJoz`=ABuZkhPqYRPWgs}Fa9xH5>(vXOYSVHNPi&|0QbD& z{*PvJE}Y5)Xe5E@JK84;_$|i(4Y5GuN-R@ZzHkZw-fXjdMo~9^)X4(=tO?*FLE{c@ zoz%ad6m?nAlL`1a6QF?zr=rErS`|)4;E$OAZ*Ql@_WuF%eHI6si$4bd0000' % ( - img, w, h, -descent) - -class HandleEq(basenodehandler.NodeHandler, eq_node): - - def get_text(self, client, node, replaceEnt): - return '%s'%(node.label, - client.styles.linkColor, node.label) - diff --git a/dist-packages/rst2pdf/rst2pdf/math_flowable.py b/dist-packages/rst2pdf/rst2pdf/math_flowable.py deleted file mode 100644 index 148ff2acf..000000000 --- a/dist-packages/rst2pdf/rst2pdf/math_flowable.py +++ /dev/null @@ -1,176 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms - -import tempfile -import os -import re - -from reportlab.platypus import * -from reportlab.pdfbase.ttfonts import TTFont -from reportlab.pdfbase import pdfmetrics - -from opt_imports import mathtext - - -from log import log - -HAS_MATPLOTLIB = mathtext is not None - -if HAS_MATPLOTLIB: - from matplotlib.font_manager import FontProperties - from matplotlib.colors import ColorConverter -fonts = {} - -def enclose(s): - """Enclose the string in $...$ if needed""" - if not re.match(r'.*\$.+\$.*', s, re.MULTILINE | re.DOTALL): - s = u"$%s$" % s - return s - -class Math(Flowable): - - def __init__(self, s, label=None, fontsize=12,color='black'): - self.s = s - self.label = label - self.fontsize = fontsize - self.color = color - if HAS_MATPLOTLIB: - self.parser = mathtext.MathTextParser("Pdf") - else: - log.error("Math support not available," - " some parts of this document will be rendered incorrectly." 
- " Install matplotlib.") - Flowable.__init__(self) - self.hAlign='CENTER' - - def wrap(self, aW, aH): - if HAS_MATPLOTLIB: - try: - width, height, descent, glyphs, \ - rects, used_characters = self.parser.parse( - enclose(self.s), 72, prop=FontProperties(size=self.fontsize)) - return width, height - except: - pass - # FIXME: report error - return 10, 10 - - def drawOn(self, canv, x, y, _sW=0): - if _sW and hasattr(self,'hAlign'): - from reportlab.lib.enums import TA_LEFT, TA_CENTER, TA_RIGHT, TA_JUSTIFY - a = self.hAlign - if a in ('CENTER','CENTRE', TA_CENTER): - x = x + 0.5*_sW - elif a in ('RIGHT',TA_RIGHT): - x = x + _sW - elif a not in ('LEFT',TA_LEFT): - raise ValueError, "Bad hAlign value "+str(a) - height = 0 - if HAS_MATPLOTLIB: - global fonts - canv.saveState() - canv.translate(x, y) - try: - width, height, descent, glyphs, \ - rects, used_characters = self.parser.parse( - enclose(self.s), 72, prop=FontProperties(size=self.fontsize)) - for ox, oy, fontname, fontsize, num, symbol_name in glyphs: - if not fontname in fonts: - fonts[fontname] = fontname - pdfmetrics.registerFont(TTFont(fontname, fontname)) - canv.setFont(fontname, fontsize) - col_conv=ColorConverter() - rgb_color=col_conv.to_rgb(self.color) - canv.setFillColorRGB(rgb_color[0],rgb_color[1],rgb_color[2]) - canv.drawString(ox, oy, unichr(num)) - - canv.setLineWidth(0) - canv.setDash([]) - for ox, oy, width, height in rects: - canv.rect(ox, oy+2*height, width, height, fill=1) - except: - # FIXME: report error - col_conv=ColorConverter() - rgb_color=col_conv.to_rgb(self.color) - canv.setFillColorRGB(rgb_color[0],rgb_color[1],rgb_color[2]) - canv.drawString(0,0,self.s) - canv.restoreState() - else: - canv.saveState() - canv.drawString(x, y, self.s) - canv.restoreState() - if self.label: - log.info('Drawing equation-%s'%self.label) - canv.bookmarkHorizontal('equation-%s'%self.label,0,height) - - def descent(self): - """Return the descent of this flowable, - useful to align it when used inline.""" - if HAS_MATPLOTLIB: - width, height, descent, glyphs, rects, used_characters = \ - self.parser.parse(enclose(self.s), 72, prop=FontProperties(size=self.fontsize)) - return descent - return 0 - - def genImage(self): - """Create a PNG from the contents of this flowable. - - Required so we can put inline math in paragraphs. - Returns the file name. - The file is caller's responsability. - - """ - - dpi = 72 - scale = 10 - - try: - import Image - import ImageFont - import ImageDraw - import ImageColor - except ImportError: - from PIL import ( - Image, - ImageFont, - ImageDraw, - ImageColor, - ) - - if not HAS_MATPLOTLIB: - img = Image.new('RGBA', (120, 120), (255,255,255,0)) - else: - width, height, descent, glyphs,\ - rects, used_characters = self.parser.parse( - enclose(self.s), dpi, prop=FontProperties(size=self.fontsize)) - img = Image.new('RGBA', (int(width*scale), int(height*scale)),(255,255,255,0)) - draw = ImageDraw.Draw(img) - for ox, oy, fontname, fontsize, num, symbol_name in glyphs: - font = ImageFont.truetype(fontname, int(fontsize*scale)) - tw, th = draw.textsize(unichr(num), font=font) - # No, I don't understand why that 4 is there. - # As we used to say in the pure math - # department, that was a numerical solution. 
- col_conv=ColorConverter() - fc=col_conv.to_rgb(self.color) - rgb_color=(int(fc[0]*255),int(fc[1]*255),int(fc[2]*255)) - draw.text((ox*scale, (height - oy - fontsize + 4)*scale), - unichr(num), font=font,fill=rgb_color) - for ox, oy, w, h in rects: - x1 = ox*scale - x2 = x1 + w*scale - y1 = (height - oy)*scale - y2 = y1 + h*scale - draw.rectangle([x1, y1, x2, y2],(0,0,0)) - - fh, fn = tempfile.mkstemp(suffix=".png") - os.close(fh) - img.save(fn) - return fn - - -if __name__ == "__main__": - doc = SimpleDocTemplate("mathtest.pdf") - Story = [Math(r'\mathcal{R}\prod_{i=\alpha\mathcal{B}}'\ - r'^\infty a_i\sin(2 \pi f x_i)')] - doc.build(Story) diff --git a/dist-packages/rst2pdf/rst2pdf/nodehandlers.py b/dist-packages/rst2pdf/rst2pdf/nodehandlers.py deleted file mode 100644 index 98e644e60..000000000 --- a/dist-packages/rst2pdf/rst2pdf/nodehandlers.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms -#$URL$ -#$Date$ -#$Revision$ - -# Import all node handler modules here. -# The act of importing them wires them in. - -import genelements -import genpdftext - -#sphinxnodes needs these -from genpdftext import NodeHandler, FontHandler, HandleEmphasis - -# createpdf needs this -nodehandlers = NodeHandler() diff --git a/dist-packages/rst2pdf/rst2pdf/oddeven_directive.py b/dist-packages/rst2pdf/rst2pdf/oddeven_directive.py deleted file mode 100644 index 8d5ee7b52..000000000 --- a/dist-packages/rst2pdf/rst2pdf/oddeven_directive.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- - -"""A custom directive that allows alternative contents to be generated -on odd and even pages.""" - -from docutils.parsers import rst -from docutils.nodes import Admonition, Element -from docutils.parsers.rst import directives - -class OddEvenNode(Admonition, Element): - pass - -class OddEven(rst.Directive): - """A custom directive that allows alternative contents to be generated - on odd and even pages. It can contain only two children, so use containers - to group them. The first one is odd, the second is even.""" - - required_arguments = 0 - optional_arguments = 0 - final_argument_whitespace = True - option_spec = {} - has_content = True - - def run(self): - self.assert_has_content() - text = '\n'.join(self.content) - node = OddEvenNode() - self.state.nested_parse(self.content, self.content_offset, node) - return [node] - - -directives.register_directive("oddeven", OddEven) - \ No newline at end of file diff --git a/dist-packages/rst2pdf/rst2pdf/opt_imports.py b/dist-packages/rst2pdf/rst2pdf/opt_imports.py deleted file mode 100644 index 8cbe5d43f..000000000 --- a/dist-packages/rst2pdf/rst2pdf/opt_imports.py +++ /dev/null @@ -1,126 +0,0 @@ -# -*- coding: utf-8 -*- - -#$URL$ -#$Date$ -#$Revision$ - -# See LICENSE.txt for licensing terms - -''' -opt_imports.py contains logic for handling optional imports. - -''' - -import os -import sys - -from log import log - -PyHyphenHyphenator = None -DCWHyphenator = None -try: - import wordaxe - from wordaxe import version as wordaxe_version - from wordaxe.rl.paragraph import Paragraph - from wordaxe.rl.styles import ParagraphStyle, getSampleStyleSheet - # PyHnjHyphenator is broken for non-ascii characters, so - # let's not use it and avoid useless crashes (http://is.gd/19efQ) - - #from wordaxe.PyHnjHyphenator import PyHnjHyphenator - # If basehyphenator doesn't load, wordaxe is broken - # pyhyphenator and DCW *may* not load. 
- - from wordaxe.BaseHyphenator import BaseHyphenator - try: - from wordaxe.plugins.PyHyphenHyphenator \ - import PyHyphenHyphenator - except: - pass - try: - from wordaxe.DCWHyphenator import DCWHyphenator - except: - pass - -except ImportError: - # log.warning("No support for hyphenation, install wordaxe") - wordaxe = None - wordaxe_version = None - BaseHyphenator = None - from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle - from reportlab.platypus.paragraph import Paragraph - - -try: - import sphinx -except ImportError: - sphinx = None - -try: - import psyco -except ImportError: - class psyco(object): - @staticmethod - def full(): - pass - -try: - import aafigure - import aafigure.pdf -except ImportError: - aafigure = None - -try: - from reportlab.platypus.flowables import NullDraw -except ImportError: # Probably RL 2.1 - from reportlab.platypus.flowables import Flowable as NullDraw - -try: - from matplotlib import mathtext -except ImportError: - mathtext = None - -class LazyImports(object): - ''' Only import some things if we need them. - ''' - - def __getattr__(self, name): - if name.startswith('_load_'): - raise AttributeError - value = None - if not os.environ.get('DISABLE_' + name.upper()): - func = getattr(self, '_load_' + name) - try: - value = func() - except ImportError: - pass - # Cache the result once we have it - setattr(self, name, value) - return value - - def _load_pdfinfo(self): - try: - from pyPdf import pdf - except ImportError: - import pdfrw as pdf - return pdf - - def _load_PILImage(self): - try: - from PIL import Image as PILImage - except ImportError: - import Image as PILImage - return PILImage - - def _load_PMImage(self): - from PythonMagick import Image - return Image - - def _load_gfx(self): - import gfx - return gfx - - def _load_svg2rlg(self): - import svg2rlg - return svg2rlg - -LazyImports = LazyImports() diff --git a/dist-packages/rst2pdf/rst2pdf/pdfbuilder.py b/dist-packages/rst2pdf/rst2pdf/pdfbuilder.py deleted file mode 100644 index f1e127597..000000000 --- a/dist-packages/rst2pdf/rst2pdf/pdfbuilder.py +++ /dev/null @@ -1,915 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Sphinx rst2pdf builder extension - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - Usage: - 1. In conf.py add 'rst2pdf.pdfbuilder' element to 'extensions' list: - extensions = ['rst2pdf.pdfbuilder'] - 2. Modify your Makefile or run it with: - $ sphinx-build -d_build/doctrees -bpdf . _build/pdf - - :copyright: Copyright 2009 Roberto Alsina, Wojtek Walczak - :license: BSD, see LICENSE for details. 
-""" - -import logging -try: - import parser -except ImportError: - # parser is not available on Jython - parser = None -import re -import sys -import os -from os import path -from os.path import abspath, dirname, expanduser, join -from pprint import pprint -from copy import copy, deepcopy -from xml.sax.saxutils import unescape, escape -from traceback import print_exc -from cStringIO import StringIO -from urlparse import urljoin, urlparse, urlunparse - -from pygments.lexers import get_lexer_by_name, guess_lexer - -from docutils import writers -from docutils import nodes -from docutils import languages -from docutils.transforms.parts import Contents -from docutils.io import FileOutput -import docutils.core - -import sphinx -from sphinx import addnodes -from sphinx.builders import Builder -from sphinx.util.console import darkgreen, red -from sphinx.util import SEP -from sphinx.util import ustrftime, texescape -from sphinx.environment import NoUri -from sphinx.locale import admonitionlabels, versionlabels -if sphinx.__version__ >= '1.': - from sphinx.locale import _ - -from rst2pdf import createpdf, pygments_code_block_directive, oddeven_directive -from rst2pdf.log import log -from rst2pdf.languages import get_language_available - - -class PDFBuilder(Builder): - name = 'pdf' - out_suffix = '.pdf' - - def init(self): - self.docnames = [] - self.document_data = [] - - def write(self, *ignored): - - self.init_document_data() - - if self.config.pdf_verbosity > 1: - log.setLevel(logging.DEBUG) - elif self.config.pdf_verbosity > 0: - log.setLevel(logging.INFO) - - for entry in self.document_data: - try: - docname, targetname, title, author = entry[:4] - # Custom options per document - if len(entry)>4 and isinstance(entry[4],dict): - opts=entry[4] - else: - opts={} - self.info("processing " + targetname + "... 
", nonl=1) - self.opts = opts - class dummy: - extensions=self.config.pdf_extensions - - createpdf.add_extensions(dummy()) - - self.page_template=opts.get('pdf_page_template',self.config.pdf_page_template) - - docwriter = PDFWriter(self, - stylesheets=opts.get('pdf_stylesheets',self.config.pdf_stylesheets), - language=opts.get('pdf_language',self.config.pdf_language), - breaklevel=opts.get('pdf_break_level',self.config.pdf_break_level), - breakside=opts.get('pdf_breakside',self.config.pdf_breakside), - fontpath=opts.get('pdf_font_path',self.config.pdf_font_path), - fitmode=opts.get('pdf_fit_mode',self.config.pdf_fit_mode), - compressed=opts.get('pdf_compressed',self.config.pdf_compressed), - inline_footnotes=opts.get('pdf_inline_footnotes',self.config.pdf_inline_footnotes), - splittables=opts.get('pdf_splittables',self.config.pdf_splittables), - default_dpi=opts.get('pdf_default_dpi',self.config.pdf_default_dpi), - page_template=self.page_template, - invariant=opts.get('pdf_invariant',self.config.pdf_invariant), - real_footnotes=opts.get('pdf_real_footnotes',self.config.pdf_real_footnotes), - use_toc=opts.get('pdf_use_toc',self.config.pdf_use_toc), - toc_depth=opts.get('pdf_toc_depth',self.config.pdf_toc_depth), - use_coverpage=opts.get('pdf_use_coverpage',self.config.pdf_use_coverpage), - use_numbered_links=opts.get('pdf_use_numbered_links',self.config.pdf_use_numbered_links), - fit_background_mode=opts.get('pdf_fit_background_mode',self.config.pdf_fit_background_mode), - baseurl=opts.get('pdf_baseurl',self.config.pdf_baseurl), - section_header_depth=opts.get('section_header_depth',self.config.section_header_depth), - srcdir=self.srcdir, - style_path=opts.get('pdf_style_path', self.config.pdf_style_path), - floating_images=opts.get('pdf_floating_images',self.config.pdf_floating_images), - config=self.config, - ) - - tgt_file = path.join(self.outdir, targetname + self.out_suffix) - destination = FileOutput(destination=open(tgt_file,'wb'), encoding='utf-8') - doctree = self.assemble_doctree(docname,title,author, - appendices=opts.get('pdf_appendices', self.config.pdf_appendices) or []) - doctree.settings.author=author - doctree.settings.title=title - self.info("done") - self.info("writing " + targetname + "... ", nonl=1) - docwriter.write(doctree, destination) - self.info("done") - except Exception, e: - log.error(str(e)) - print_exc() - self.info(red("FAILED")) - - def init_document_data(self): - preliminary_document_data = map(list, self.config.pdf_documents) - if not preliminary_document_data: - self.warn('no "pdf_documents" config value found; no documents ' - 'will be written') - return - # assign subdirs to titles - self.titles = [] - for entry in preliminary_document_data: - docname = entry[0] - if docname not in self.env.all_docs: - self.warn('"pdf_documents" config value references unknown ' - 'document %s' % docname) - continue - self.document_data.append(entry) - if docname.endswith(SEP+'index'): - docname = docname[:-5] - self.titles.append((docname, entry[2])) - - def assemble_doctree(self, docname, title, author, appendices): - - # FIXME: use the new inline_all_trees from Sphinx. - # check how the LaTeX builder does it. 
- - self.docnames = set([docname]) - self.info(darkgreen(docname) + " ", nonl=1) - def process_tree(docname, tree): - tree = tree.deepcopy() - for toctreenode in tree.traverse(addnodes.toctree): - newnodes = [] - includefiles = map(str, toctreenode['includefiles']) - for includefile in includefiles: - try: - self.info(darkgreen(includefile) + " ", nonl=1) - subtree = process_tree(includefile, - self.env.get_doctree(includefile)) - self.docnames.add(includefile) - except Exception: - self.warn('%s: toctree contains ref to nonexisting file %r'\ - % (docname, includefile)) - else: - sof = addnodes.start_of_file(docname=includefile) - sof.children = subtree.children - newnodes.append(sof) - toctreenode.parent.replace(toctreenode, newnodes) - return tree - - tree = self.env.get_doctree(docname) - tree = process_tree(docname, tree) - - self.docutils_languages = {} - if self.config.language: - self.docutils_languages[self.config.language] = \ - get_language_available(self.config.language)[2] - - if self.opts.get('pdf_use_index',self.config.pdf_use_index): - # Add index at the end of the document - - # This is a hack. create_index creates an index from - # ALL the documents data, not just this one. - # So, we preserve a copy, use just what we need, then - # restore it. - #from pudb import set_trace; set_trace() - t=copy(self.env.indexentries) - try: - self.env.indexentries={docname:self.env.indexentries[docname+'-gen']} - except KeyError: - self.env.indexentries={} - for dname in self.docnames: - self.env.indexentries[dname]=t.get(dname,[]) - genindex = self.env.create_index(self) - self.env.indexentries=t - # EOH (End Of Hack) - - if genindex: # No point in creating empty indexes - index_nodes=genindex_nodes(genindex) - tree.append(nodes.raw(text='OddPageBreak twoColumn', format='pdf')) - tree.append(index_nodes) - - # This is stolen from the HTML builder's prepare_writing function - self.domain_indices = [] - # html_domain_indices can be False/True or a list of index names - indices_config = self.config.pdf_domain_indices - if indices_config and hasattr(self.env, 'domains'): - for domain in self.env.domains.itervalues(): - for indexcls in domain.indices: - indexname = '%s-%s' % (domain.name, indexcls.name) - if isinstance(indices_config, list): - if indexname not in indices_config: - continue - # deprecated config value - if indexname == 'py-modindex' and \ - not self.config.pdf_use_modindex: - continue - content, collapse = indexcls(domain).generate() - if content: - self.domain_indices.append( - (indexname, indexcls, content, collapse)) - - # self.domain_indices contains a list of indices to generate, like - # this: - # [('py-modindex', - # , - # [(u'p', [[u'parrot', 0, 'test', u'module-parrot', 'Unix, Windows', - # '', 'Analyze and reanimate dead parrots.']])], True)] - - # Now this in the HTML builder is passed onto write_domain_indices. - # We handle it right here - - for indexname, indexcls, content, collapse in self.domain_indices: - indexcontext = dict( - indextitle = indexcls.localname, - content = content, - collapse_index = collapse, - ) - # In HTML this is handled with a Jinja template, domainindex.html - # We have to generate docutils stuff right here in the same way. - self.info(' ' + indexname, nonl=1) - print - - output=['DUMMY','=====','', - '.. _modindex:\n\n'] - t=indexcls.localname - t+='\n'+'='*len(t)+'\n' - output.append(t) - - for letter, entries in content: - output.append('.. 
cssclass:: heading4\n\n%s\n\n'%letter) - for (name, grouptype, page, anchor, - extra, qualifier, description) in entries: - if qualifier: - q = '[%s]'%qualifier - else: - q = '' - - if extra: - e = '(%s)'%extra - else: - e = '' - output.append ('`%s <#%s>`_ %s %s'%(name, anchor, e, q)) - output.append(' %s'%description) - output.append('') - - dt = docutils.core.publish_doctree('\n'.join(output))[1:] - dt.insert(0,nodes.raw(text='OddPageBreak twoColumn', format='pdf')) - tree.extend(dt) - - - if appendices: - tree.append(nodes.raw(text='OddPageBreak %s'%self.page_template, format='pdf')) - self.info() - self.info('adding appendixes...', nonl=1) - for docname in appendices: - self.info(darkgreen(docname) + " ", nonl=1) - appendix = self.env.get_doctree(docname) - appendix['docname'] = docname - tree.append(appendix) - self.info('done') - - self.info() - self.info("resolving references...") - #print tree - #print '--------------' - self.env.resolve_references(tree, docname, self) - #print tree - - for pendingnode in tree.traverse(addnodes.pending_xref): - # This needs work, need to keep track of all targets - # so I don't replace and create hanging refs, which - # crash - if pendingnode.get('reftarget',None) == 'genindex'\ - and self.config.pdf_use_index: - pendingnode.replace_self(nodes.reference(text=pendingnode.astext(), - refuri=pendingnode['reftarget'])) - # FIXME: probably need to handle dangling links to domain-specific indexes - else: - # FIXME: This is from the LaTeX builder and I still don't understand it - # well, and doesn't seem to work - - # resolve :ref:s to distant tex files -- we can't add a cross-reference, - # but append the document name - docname = pendingnode['refdocname'] - sectname = pendingnode['refsectname'] - newnodes = [nodes.emphasis(sectname, sectname)] - for subdir, title in self.titles: - if docname.startswith(subdir): - newnodes.append(nodes.Text(_(' (in '), _(' (in '))) - newnodes.append(nodes.emphasis(title, title)) - newnodes.append(nodes.Text(')', ')')) - break - else: - pass - pendingnode.replace_self(newnodes) - #else: - #pass - return tree - - def get_target_uri(self, docname, typ=None): - #print 'GTU',docname,typ - # FIXME: production lists are not supported yet! - if typ == 'token': - # token references are always inside production lists and must be - # replaced by \token{} in LaTeX - return '@token' - if docname not in self.docnames: - - # It can be a 'main' document: - for doc in self.document_data: - if doc[0]==docname: - return "pdf:"+doc[1]+'.pdf' - # It can be in some other document's toctree - for indexname, toctree in self.env.toctree_includes.items(): - if docname in toctree: - for doc in self.document_data: - if doc[0]==indexname: - return "pdf:"+doc[1]+'.pdf' - # No idea - raise NoUri - else: # Local link - return "" - - def get_relative_uri(self, from_, to, typ=None): - # ignore source path - return self.get_target_uri(to, typ) - - def get_outdated_docs(self): - for docname in self.env.found_docs: - if docname not in self.env.all_docs: - yield docname - continue - targetname = self.env.doc2path(docname, self.outdir, self.out_suffix) - try: - targetmtime = path.getmtime(targetname) - except Exception: - targetmtime = 0 - try: - srcmtime = path.getmtime(self.env.doc2path(docname)) - if srcmtime > targetmtime: - yield docname - except EnvironmentError: - # source doesn't exist anymore - pass - -def genindex_nodes(genindexentries): - indexlabel = _('Index') - indexunder = '='*len(indexlabel) - output=['DUMMY','=====','.. 
_genindex:\n\n',indexlabel,indexunder,''] - - for key, entries in genindexentries: - #from pudb import set_trace; set_trace() - output.append('.. cssclass:: heading4\n\n%s\n\n'%key) # initial - for entryname, (links, subitems) in entries: - if links: - output.append('`%s <#%s>`_'%(entryname,nodes.make_id(links[0][1]))) - for i,link in enumerate(links[1:]): - output[-1]+=(' `[%s] <#%s>`_ '%(i+1,nodes.make_id(link[1]))) - output.append('') - else: - output.append(entryname) - if subitems: - for subentryname, subentrylinks in subitems: - if subentrylinks: - output.append(' `%s <%s>`_'%(subentryname,subentrylinks[0])) - for i,link in enumerate(subentrylinks[1:]): - output[-1]+=(' `[%s] <%s>`_ '%(i+1,link)) - output.append('') - else: - output.append(subentryname) - output.append('') - - - doctree = docutils.core.publish_doctree('\n'.join(output)) - return doctree[1] - - -class PDFContents(Contents): - - # Mostly copied from Docutils' Contents transformation - - def build_contents(self, node, level=0): - level += 1 - sections=[] - # Replaced this with the for below to make it work for Sphinx - # trees. - - #sections = [sect for sect in node if isinstance(sect, nodes.section)] - for sect in node: - if isinstance(sect,nodes.compound): - for sect2 in sect: - if isinstance(sect2,addnodes.start_of_file): - for sect3 in sect2: - if isinstance(sect3,nodes.section): - sections.append(sect3) - elif isinstance(sect, nodes.section): - sections.append(sect) - entries = [] - autonum = 0 - # FIXME: depth should be taken from :maxdepth: (Issue 320) - depth = self.toc_depth - for section in sections: - title = section[0] - auto = title.get('auto') # May be set by SectNum. - entrytext = self.copy_and_filter(title) - reference = nodes.reference('', '', refid=section['ids'][0], - *entrytext) - ref_id = self.document.set_id(reference) - entry = nodes.paragraph('', '', reference) - item = nodes.list_item('', entry) - if ( self.backlinks in ('entry', 'top') - and title.next_node(nodes.reference) is None): - if self.backlinks == 'entry': - title['refid'] = ref_id - elif self.backlinks == 'top': - title['refid'] = self.toc_id - if level < depth: - subsects = self.build_contents(section, level) - item += subsects - entries.append(item) - if entries: - contents = nodes.bullet_list('', *entries) - if auto: - contents['classes'].append('auto-toc') - return contents - else: - return [] - - -class PDFWriter(writers.Writer): - def __init__(self, - builder, - stylesheets, - language, - breaklevel = 0, - breakside = 'any', - fontpath = [], - fitmode = 'shrink', - compressed = False, - inline_footnotes = False, - splittables = True, - srcdir = '.', - default_dpi = 300, - page_template = 'cutePage', - invariant = False, - real_footnotes = False, - use_toc = True, - use_coverpage = True, - toc_depth = 9999, - use_numbered_links = False, - fit_background_mode = "scale", - section_header_depth = 2, - baseurl = urlunparse(['file',os.getcwd()+os.sep,'','','','']), - style_path = None, - floating_images = False, - config = {}): - writers.Writer.__init__(self) - self.builder = builder - self.output = '' - self.stylesheets = stylesheets - self.__language = language - self.breaklevel = int(breaklevel) - self.breakside = breakside - self.fontpath = fontpath - self.fitmode = fitmode - self.compressed = compressed - self.inline_footnotes = inline_footnotes - self.splittables = splittables - self.highlightlang = builder.config.highlight_language - self.srcdir = srcdir - self.config = config - self.default_dpi = default_dpi - 
self.page_template = page_template - self.invariant=invariant - self.real_footnotes=real_footnotes - self.use_toc=use_toc - self.use_coverpage=use_coverpage - self.toc_depth=toc_depth - self.use_numbered_links=use_numbered_links - self.fit_background_mode=fit_background_mode - self.section_header_depth=section_header_depth - self.floating_images=floating_images - self.baseurl = baseurl - if hasattr(sys, 'frozen'): - self.PATH = abspath(dirname(sys.executable)) - else: - self.PATH = abspath(dirname(__file__)) - if style_path: - self.style_path = style_path - else: - self.style_path = [self.srcdir] - - - supported = ('pdf') - config_section = 'pdf writer' - config_section_dependencies = ('writers',) - - def translate(self): - visitor = PDFTranslator(self.document, self.builder) - self.document.walkabout(visitor) - lang = self.config.language or 'en' - langmod = get_language_available(lang)[2] - self.docutils_languages = {lang: langmod} - - # Generate Contents topic manually - if self.use_toc: - contents=nodes.topic(classes=['contents']) - contents+=nodes.title('') - contents[0]+=nodes.Text(langmod.labels['contents']) - contents['ids']=['Contents'] - pending=nodes.topic() - contents.append(pending) - pending.details={} - self.document.insert(0,nodes.raw(text='SetPageCounter 1 arabic', format='pdf')) - self.document.insert(0,nodes.raw(text='OddPageBreak %s'%self.page_template, format='pdf')) - self.document.insert(0,contents) - self.document.insert(0,nodes.raw(text='SetPageCounter 1 lowerroman', format='pdf')) - contTrans=PDFContents(self.document) - contTrans.toc_depth = self.toc_depth - contTrans.startnode=pending - contTrans.apply() - - if self.use_coverpage: - # Generate cover page - - # FIXME: duplicate from createpdf, refactor! - - # Find cover template, save it in cover_file - def find_cover(name): - cover_path=[self.srcdir, os.path.expanduser('~/.rst2pdf'), - os.path.join(self.PATH,'templates')] - - # Add the Sphinx template paths - def add_template_path(path): - return os.path.join(self.srcdir, path) - - cover_path.extend(map(add_template_path, self.config.templates_path)) - - cover_file=None - for d in cover_path: - if os.path.exists(os.path.join(d,name)): - cover_file=os.path.join(d,name) - break - return cover_file - - cover_file=find_cover(self.config.pdf_cover_template) - if cover_file is None: - log.error("Can't find cover template %s, using default"%self.custom_cover) - cover_file=find_cover('sphinxcover.tmpl') - - # This is what's used in the python docs because - # Latex does a manual linebreak. This sucks. - authors=self.document.settings.author.split('\\') - - # Feed data to the template, get restructured text. 
- cover_text = createpdf.renderTemplate(tname=cover_file, - title=self.document.settings.title or visitor.elements['title'], - subtitle='%s %s'%(_('version'),self.config.version), - authors=authors, - date=ustrftime(self.config.today_fmt or _('%B %d, %Y')) - ) - - cover_tree = docutils.core.publish_doctree(cover_text) - self.document.insert(0, cover_tree) - - sio=StringIO() - - if self.invariant: - createpdf.patch_PDFDate() - createpdf.patch_digester() - - createpdf.RstToPdf(sphinx=True, - stylesheets=self.stylesheets, - language=self.__language, - breaklevel=self.breaklevel, - breakside=self.breakside, - fit_mode=self.fitmode, - font_path=self.fontpath, - inline_footnotes=self.inline_footnotes, - highlightlang=self.highlightlang, - splittables=self.splittables, - style_path=self.style_path, - basedir=self.srcdir, - def_dpi=self.default_dpi, - real_footnotes=self.real_footnotes, - numbered_links=self.use_numbered_links, - background_fit_mode=self.fit_background_mode, - baseurl=self.baseurl, - section_header_depth=self.section_header_depth, - floating_images=self.floating_images - ).createPdf(doctree=self.document, - output=sio, - compressed=self.compressed) - self.output=sio.getvalue() - - def supports(self, format): - """This writer supports all format-specific elements.""" - return 1 - - -class PDFTranslator(nodes.SparseNodeVisitor): - def __init__(self, document, builder): - nodes.NodeVisitor.__init__(self, document) - self.builder = builder - self.footnotestack = [] - self.curfilestack = [] - self.highlightlinenothreshold = 999999 - self.top_sectionlevel = 1 - self.footnotecounter=1 - self.curfile=None - self.footnotedict={} - self.this_is_the_title = True - self.in_title = 0 - self.elements = { - 'title': document.settings.title, - } - self.highlightlang = builder.config.highlight_language - - def visit_document(self,node): - self.curfilestack.append(node.get('docname', '')) - self.footnotestack.append('') - - def visit_start_of_file(self,node): - self.curfilestack.append(node['docname']) - self.footnotestack.append(node['docname']) - - def depart_start_of_file(self,node): - self.footnotestack.pop() - self.curfilestack.pop() - - def visit_highlightlang(self, node): - self.highlightlang = node['lang'] - self.highlightlinenothreshold = node['linenothreshold'] - raise nodes.SkipNode - - def visit_versionmodified(self, node): - text = versionlabels[node['type']] % node['version'] - if len(node): - text += ': ' - else: - text += '.' 
- replacement=nodes.paragraph() - replacement+=nodes.Text(text) - replacement.extend(node.children) - node.parent.replace(node,replacement) - - def depart_versionmodified(self, node): - pass - - def visit_literal_block(self, node): - if 'code' in node['classes']: #Probably a processed code-block - pass - else: - lang=lang_for_block(node.astext(),node.get('language',self.highlightlang)) - content = node.astext().splitlines() - if len(content) > self.highlightlinenothreshold or\ - node.get('linenos',False): - options = { 'linenos': True } - else: - options = {} - - # FIXME: make tab width configurable - content = [c.replace('\t',' ') for c in content] - replacement = nodes.literal_block() - replacement.children = \ - pygments_code_block_directive.code_block_directive( - name = None, - arguments = [lang], - options = options, - content = content, - lineno = False, - content_offset = None, - block_text = None, - state = None, - state_machine = None, - ) - node.parent.replace(node,replacement) - - def visit_footnote(self, node): - node['backrefs']=[ '%s_%s'%(self.footnotestack[-1],x) for x in node['backrefs']] - node['ids']=[ '%s_%s'%(self.footnotestack[-1],x) for x in node['ids']] - node.children[0][0]=nodes.Text(str(self.footnotecounter)) - for id in node['backrefs']: - fnr=self.footnotedict[id] - fnr.children[0]=nodes.Text(str(self.footnotecounter)) - self.footnotecounter+=1 - - def visit_footnote_reference(self, node): - node['ids']=[ '%s_%s'%(self.footnotestack[-1],x) for x in node['ids']] - node['refid']='%s_%s'%(self.footnotestack[-1],node['refid']) - self.footnotedict[node['ids'][0]]=node - - def visit_desc_annotation(self, node): - pass - - def depart_desc_annotation(self, node): - pass - - # This is for graphviz support - def visit_graphviz(self, node): - # Not neat, but I need to send self to my handlers - node['builder']=self - - def visit_Aanode(self, node): - pass - - def depart_Aanode(self, node): - pass - - def visit_productionlist(self, node): - replacement=nodes.literal_block(classes=["code"]) - names = [] - for production in node: - names.append(production['tokenname']) - maxlen = max(len(name) for name in names) - for production in node: - if production['tokenname']: - lastname = production['tokenname'].ljust(maxlen) - n=nodes.strong() - n+=nodes.Text(lastname) - replacement+=n - replacement+=nodes.Text(' ::= ') - else: - replacement+=nodes.Text('%s ' % (' '*len(lastname))) - production.walkabout(self) - replacement.children.extend(production.children) - replacement+=nodes.Text('\n') - node.parent.replace(node,replacement) - raise nodes.SkipNode - def depart_productionlist(self, node): - pass - - def visit_production(self, node): - pass - def depart_production(self, node): - pass - - def visit_OddEvenNode(self, node): - pass - def depart_OddEvenNode(self, node): - pass - -# This is copied from sphinx.highlighting -def lang_for_block(source,lang): - if lang in ('py', 'python'): - if source.startswith('>>>'): - # interactive session - return 'pycon' - else: - # maybe Python -- try parsing it - if try_parse(source): - return 'python' - else: # Guess - return lang_for_block(source,'guess') - elif lang in ('python3', 'py3') and source.startswith('>>>'): - # for py3, recognize interactive sessions, but do not try parsing... 
- return 'pycon3' - elif lang == 'guess': - try: - #return 'python' - lexer=guess_lexer(source) - return lexer.aliases[0] - except Exception: - return None - else: - return lang - -def try_parse(src): - # Make sure it ends in a newline - src += '\n' - - # Replace "..." by a mark which is also a valid python expression - # (Note, the highlighter gets the original source, this is only done - # to allow "..." in code and still highlight it as Python code.) - mark = "__highlighting__ellipsis__" - src = src.replace("...", mark) - - # lines beginning with "..." are probably placeholders for suite - src = re.sub(r"(?m)^(\s*)" + mark + "(.)", r"\1"+ mark + r"# \2", src) - - # if we're using 2.5, use the with statement - if sys.version_info >= (2, 5): - src = 'from __future__ import with_statement\n' + src - - if isinstance(src, unicode): - # Non-ASCII chars will only occur in string literals - # and comments. If we wanted to give them to the parser - # correctly, we'd have to find out the correct source - # encoding. Since it may not even be given in a snippet, - # just replace all non-ASCII characters. - src = src.encode('ascii', 'replace') - - if parser is None: - return True - try: - parser.suite(src) - except SyntaxError, UnicodeEncodeError: - return False - else: - return True - -def init_math(app): - """ - This is a dummy math extension. - - It's a hack, but if you want math in a PDF via pdfbuilder, and don't want to - enable pngmath or jsmath, then enable this one. - - :copyright: Copyright 2007-2009 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. - """ - from sphinx.errors import SphinxError - try: - # Sphinx 0.6.4 and later - from sphinx.ext.mathbase import setup_math as mathbase_setup - except ImportError: - try: - # Sphinx 0.6.3 - from sphinx.ext.mathbase import setup as mathbase_setup - except ImportError, e: - log.error('Error importing sphinx math extension: %s', e) - - class MathExtError(SphinxError): - category = 'Math extension error' - - - def html_visit_math(self, node): - self.body.append(node['latex']) - raise nodes.SkipNode - - def html_visit_displaymath(self, node): - self.body.append(node['latex']) - raise nodes.SkipNode - - mathbase_setup(app, (html_visit_math, None), (html_visit_displaymath, None)) - - -def setup(app): - #Init dummy math extension - init_math(app) - - app.add_builder(PDFBuilder) - # PDF options - app.add_config_value('pdf_documents', [], None) - app.add_config_value('pdf_stylesheets', ['sphinx'], None) - app.add_config_value('pdf_style_path', None, None) - app.add_config_value('pdf_compressed', False, None) - app.add_config_value('pdf_font_path', [], None) - app.add_config_value('pdf_language', 'en_US', None) - app.add_config_value('pdf_fit_mode', '', None), - app.add_config_value('pdf_break_level', 0, None) - app.add_config_value('pdf_inline_footnotes', True, None) - app.add_config_value('pdf_verbosity', 0, None) - app.add_config_value('pdf_use_index', True, None) - app.add_config_value('pdf_domain_indices', True, None) - app.add_config_value('pdf_use_modindex', True, None) - app.add_config_value('pdf_use_coverpage', True, None) - app.add_config_value('pdf_cover_template', 'sphinxcover.tmpl', None) - app.add_config_value('pdf_appendices', [], None) - app.add_config_value('pdf_splittables', True, None) - app.add_config_value('pdf_breakside', 'odd', None) - app.add_config_value('pdf_default_dpi', 300, None) - app.add_config_value('pdf_extensions',['vectorpdf'], None) - app.add_config_value('pdf_page_template','cutePage', None) - 
app.add_config_value('pdf_invariant','False', None) - app.add_config_value('pdf_real_footnotes','False', None) - app.add_config_value('pdf_use_toc','True', None) - app.add_config_value('pdf_toc_depth',9999, None) - app.add_config_value('pdf_use_numbered_links',False, None) - app.add_config_value('pdf_fit_background_mode',"scale", None) - app.add_config_value('section_header_depth',2, None) - app.add_config_value('pdf_baseurl', urlunparse(['file',os.getcwd()+os.sep,'','','','']), None) - app.add_config_value('pdf_floating_images', False, None) - - author_texescaped = unicode(app.config.copyright)\ - .translate(texescape.tex_escape_map) - project_doc_texescaped = unicode(app.config.project + ' Documentation')\ - .translate(texescape.tex_escape_map) - app.config.pdf_documents.append((app.config.master_doc, - app.config.project, - project_doc_texescaped, - author_texescaped, - 'manual')) diff --git a/dist-packages/rst2pdf/rst2pdf/pygments2json.py b/dist-packages/rst2pdf/rst2pdf/pygments2json.py deleted file mode 100644 index d1bd243f4..000000000 --- a/dist-packages/rst2pdf/rst2pdf/pygments2json.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms -''' -Creates a rst2pdf stylesheet for each pygments style. -''' - -import sys -import os -import simplejson -from pygments.token import STANDARD_TYPES -from pygments import styles as pstyles - - -def css2rl(css): - dstyles = {} - # First create a dumb stylesheet - for key in STANDARD_TYPES: - dstyles["pygments-" + STANDARD_TYPES[key]] = {'parent': 'code'} - - styles = [] - for line in css.splitlines(): - line = line.strip() - sname = "pygments-" + line.split(' ')[0][1:] - style = dstyles.get(sname, {'parent': 'code'}) - options = line.split('{')[1].split('}')[0].split(';') - for option in options: - option = option.strip() - option, argument = option.split(':') - option=option.strip() - argument=argument.strip() - if option == 'color': - style['textColor'] = argument.strip() - if option == 'background-color': - style['backColor'] = argument.strip() - - # These two can come in any order - if option == 'font-weight' and argument == 'bold': - if 'fontName' in style and \ - style['fontName'] == 'stdMonoItalic': - style['fontName'] = 'stdMonoBoldItalic' - else: - style['fontName'] = 'stdMonoBold' - if option == 'font-style' and argument == 'italic': - if 'fontName' in style and style['fontName'] == 'stdBold': - style['fontName'] = 'stdMonoBoldItalic' - else: - style['fontName'] = 'stdMonoItalic' - if style.get('textColor', None) is None: - style['textColor']='black' - styles.append([sname, style]) - - return simplejson.dumps({'styles': styles}, indent=2) - -for name in list(pstyles.get_all_styles()): - css=os.popen('pygmentize -S %s -f html'%name, 'r').read() - open(name+'.json', 'w').write(css2rl(css)) diff --git a/dist-packages/rst2pdf/rst2pdf/pygments2style.py b/dist-packages/rst2pdf/rst2pdf/pygments2style.py deleted file mode 100644 index 6436f7192..000000000 --- a/dist-packages/rst2pdf/rst2pdf/pygments2style.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms -''' -Creates a rst2pdf stylesheet for each pygments style. 
-''' - -import sys -import os -import dumpstyle -from pygments.token import STANDARD_TYPES -from pygments import styles as pstyles - -# First get a list of all possible classes -classnames=set() -for name in list(pstyles.get_all_styles()): - css=os.popen('pygmentize -S %s -f html'%name, 'r').read() - for line in css.splitlines(): - line = line.strip() - sname = "pygments-" + line.split(' ')[0][1:] - classnames.add(sname) - -def css2rl(css): - dstyles = {} - # First create a dumb stylesheet - for key in STANDARD_TYPES: - dstyles["pygments-" + STANDARD_TYPES[key]] = {'parent': 'code'} - seenclassnames=set() - styles = [] - for line in css.splitlines(): - line = line.strip() - sname = "pygments-" + line.split(' ')[0][1:] - seenclassnames.add(sname) - style = dstyles.get(sname, {'parent': 'code'}) - options = line.split('{')[1].split('}')[0].split(';') - for option in options: - option = option.strip() - option, argument = option.split(':') - option=option.strip() - argument=argument.strip() - if option == 'color': - style['textColor'] = argument.strip() - if option == 'background-color': - style['backColor'] = argument.strip() - - # These two can come in any order - if option == 'font-weight' and argument == 'bold': - if 'fontName' in style and \ - style['fontName'] == 'stdMonoItalic': - style['fontName'] = 'stdMonoBoldItalic' - else: - style['fontName'] = 'stdMonoBold' - if option == 'font-style' and argument == 'italic': - if 'fontName' in style and style['fontName'] == 'stdBold': - style['fontName'] = 'stdMonoBoldItalic' - else: - style['fontName'] = 'stdMonoItalic' - if style.get('textColor', None) is None: - style['textColor']='black' - styles.append([sname, style]) - - # Now add default styles for all unseen class names - for sname in classnames-seenclassnames: - style = dstyles.get(sname, {'parent': 'code'}) - style['textColor']='black' - styles.append([sname, style]) - - return dumpstyle.dumps({'styles': styles}) - - - -for name in list(pstyles.get_all_styles()): - css=os.popen('pygmentize -S %s -f html'%name, 'r').read() - open(name+'.style', 'w').write(css2rl(css)) diff --git a/dist-packages/rst2pdf/rst2pdf/pygments_code_block_directive.py b/dist-packages/rst2pdf/rst2pdf/pygments_code_block_directive.py deleted file mode 100755 index a425ed875..000000000 --- a/dist-packages/rst2pdf/rst2pdf/pygments_code_block_directive.py +++ /dev/null @@ -1,396 +0,0 @@ -# -*- coding: utf-8 -*- -#$URL$ -#$Date$ -#$Revision$ - -# :Author: a Pygments author|contributor; Felix Wiemann; Guenter Milde -# :Date: $Date$ -# :Copyright: This module has been placed in the public domain. -# -# This is a merge of `Using Pygments in ReST documents`_ from the pygments_ -# documentation, and a `proof of concept`_ by Felix Wiemann. -# -# ========== =========================================================== -# 2007-06-01 Removed redundancy from class values. -# 2007-06-04 Merge of successive tokens of same type -# (code taken from pygments.formatters.others). -# 2007-06-05 Separate docutils formatter script -# Use pygments' CSS class names (like the html formatter) -# allowing the use of pygments-produced style sheets. 
-# 2007-06-07 Merge in the formatting of the parsed tokens -# (misnamed as docutils_formatter) as class DocutilsInterface -# 2007-06-08 Failsave implementation (fallback to a standard literal block -# if pygments not found) -# ========== =========================================================== -# -# :: - -"""Define and register a code-block directive using pygments""" - - -# Requirements -# ------------ -# :: - -import codecs -from docutils import nodes -from docutils.parsers.rst import directives - -try: - import pygments - from pygments.lexers import get_lexer_by_name - from pygments.formatters.html import _get_ttype_class -except ImportError: - pass - -from log import log - - -# Customisation -# ------------- -# -# Do not insert inline nodes for the following tokens. -# (You could add e.g. Token.Punctuation like ``['', 'p']``.) :: - -unstyled_tokens = [''] - - -# DocutilsInterface -# ----------------- -# -# This interface class combines code from -# pygments.formatters.html and pygments.formatters.others. -# -# It does not require anything of docutils and could also become a part of -# pygments:: - -class DocutilsInterface(object): - """Parse `code` string and yield "classified" tokens. - - Arguments - - code -- string of source code to parse - language -- formal language the code is written in. - - Merge subsequent tokens of the same token-type. - - Yields the tokens as ``(ttype_class, value)`` tuples, - where ttype_class is taken from pygments.token.STANDARD_TYPES and - corresponds to the class argument used in pygments html output. - - """ - - def __init__(self, code, language, custom_args={}): - self.code = code - self.language = language - self.custom_args = custom_args - - def lex(self): - # Get lexer for language (use text as fallback) - try: - if self.language and unicode(self.language).lower() <> 'none': - lexer = get_lexer_by_name(self.language.lower(), - **self.custom_args - ) - else: - lexer = get_lexer_by_name('text', **self.custom_args) - except ValueError: - log.info("no pygments lexer for %s, using 'text'" \ - % self.language) - # what happens if pygment isn't present ? 
- lexer = get_lexer_by_name('text') - return pygments.lex(self.code, lexer) - - def join(self, tokens): - """join subsequent tokens of same token-type - """ - tokens = iter(tokens) - (lasttype, lastval) = tokens.next() - for ttype, value in tokens: - if ttype is lasttype: - lastval += value - else: - yield(lasttype, lastval) - (lasttype, lastval) = (ttype, value) - yield(lasttype, lastval) - - def __iter__(self): - """parse code string and yield "clasified" tokens - """ - try: - tokens = self.lex() - except IOError: - log.info("Pygments lexer not found, using fallback") - # TODO: write message to INFO - yield ('', self.code) - return - - for ttype, value in self.join(tokens): - yield (_get_ttype_class(ttype), value) - - -# code_block_directive -# -------------------- -# :: - -def code_block_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - """Parse and classify content of a code_block.""" - if 'include' in options: - try: - if 'encoding' in options: - encoding = options['encoding'] - else: - encoding = 'utf-8' - content = codecs.open(options['include'], 'r', encoding).read().rstrip() - except (IOError, UnicodeError): # no file or problem finding it or reading it - log.error('Error reading file: "%s" L %s' % (options['include'], lineno)) - content = u'' - line_offset = 0 - if content: - # here we define the start-at and end-at options - # so that limit is included in extraction - # this is different than the start-after directive of docutils - # (docutils/parsers/rst/directives/misc.py L73+) - # which excludes the beginning - # the reason is we want to be able to define a start-at like - # def mymethod(self) - # and have such a definition included - - after_text = options.get('start-at', None) - if after_text: - # skip content in include_text before *and NOT incl.* a matching text - after_index = content.find(after_text) - if after_index < 0: - raise state_machine.reporter.severe('Problem with "start-at" option of "%s" ' - 'code-block directive:\nText not found.' % options['start-at']) - # patch mmueller start - # Move the after_index to the beginning of the line with the - # match. - for char in content[after_index:0:-1]: - # codecs always opens binary. This works with '\n', '\r' and - # '\r\n'. We are going backwards, so '\n' is found first - # in '\r\n'. - # Going with .splitlines() seems more appropriate - # but needs a few more changes. - if char == u'\n' or char == u'\r': - break - after_index -= 1 - # patch mmueller end - - content = content[after_index:] - line_offset = len(content[:after_index].splitlines()) - - after_text = options.get('start-after', None) - if after_text: - # skip content in include_text before *and incl.* a matching text - after_index = content.find(after_text) - if after_index < 0: - raise state_machine.reporter.severe('Problem with "start-after" option of "%s" ' - 'code-block directive:\nText not found.' % options['start-after']) - line_offset = len(content[:after_index + len(after_text)].splitlines()) - content = content[after_index + len(after_text):] - - - # same changes here for the same reason - before_text = options.get('end-at', None) - if before_text: - # skip content in include_text after *and incl.* a matching text - before_index = content.find(before_text) - if before_index < 0: - raise state_machine.reporter.severe('Problem with "end-at" option of "%s" ' - 'code-block directive:\nText not found.' 
% options['end-at']) - content = content[:before_index + len(before_text)] - - before_text = options.get('end-before', None) - if before_text: - # skip content in include_text after *and NOT incl.* a matching text - before_index = content.find(before_text) - if before_index < 0: - raise state_machine.reporter.severe('Problem with "end-before" option of "%s" ' - 'code-block directive:\nText not found.' % options['end-before']) - content = content[:before_index] - - else: - line_offset = options.get('linenos_offset') - content = u'\n'.join(content) - - if 'tabsize' in options: - tabw = options['tabsize'] - else: - tabw = int(options.get('tab-width', 8)) - - content = content.replace('\t',' '*tabw) - - withln = "linenos" in options - if not "linenos_offset" in options: - line_offset = 0 - - language = arguments[0] - # create a literal block element and set class argument - code_block = nodes.literal_block(classes=["code", language]) - - if withln: - lineno = 1 + line_offset - total_lines = content.count('\n') + 1 + line_offset - lnwidth = len(str(total_lines)) - fstr = "\n%%%dd " % lnwidth - code_block += nodes.inline(fstr[1:] % lineno, fstr[1:] % lineno, classes=['linenumber']) - - # parse content with pygments and add to code_block element - for cls, value in DocutilsInterface(content, language, options): - if withln and "\n" in value: - # Split on the "\n"s - values = value.split("\n") - # The first piece, pass as-is - code_block += nodes.Text(values[0], values[0]) - # On the second and later pieces, insert \n and linenos - linenos = range(lineno, lineno + len(values)) - for chunk, ln in zip(values, linenos)[1:]: - if ln <= total_lines: - code_block += nodes.inline(fstr % ln, fstr % ln, classes=['linenumber']) - code_block += nodes.Text(chunk, chunk) - lineno += len(values) - 1 - - elif cls in unstyled_tokens: - # insert as Text to decrease the verbosity of the output. - code_block += nodes.Text(value, value) - else: - code_block += nodes.inline(value, value, classes=["pygments-" + cls]) - - return [code_block] - -# Custom argument validators -# -------------------------- -# :: -# -# Move to separated module?? - -def zero_or_positive_int(argument): - """ - Converts a string into python positive integer including zero. - None is a special case; it is regarded as zero. - """ - if argument is None: - return 0 - elif argument == '0': - return 0 - else: - return directives.positive_int(argument) - - -def string_list(argument): - """ - Converts a space- or comma-separated list of values into a python list - of strings. 
- (Directive option conversion function) - Based in positive_int_list of docutils.parsers.rst.directives - """ - if ',' in argument: - entries = argument.split(',') - else: - entries = argument.split() - return entries - -def string_bool(argument): - """ - Converts True, true, False, False in python boolean values - """ - if argument is None: - msg = 'argument required but none supplied; choose from "True" or "False"' - raise ValueError(msg) - - elif argument.lower() == 'true': - return True - elif argument.lower() == 'false': - return False - else: - raise ValueError('"%s" unknown; choose from "True" or "False"' - % argument) - -def csharp_unicodelevel(argument): - return directives.choice(argument, ('none', 'basic', 'full')) - -def lhs_litstyle(argument): - return directives.choice(argument, ('bird', 'latex')) - -def raw_compress(argument): - return directives.choice(argument, ('gz', 'bz2')) - - - - -# Register Directive -# ------------------ -# :: - -code_block_directive.arguments = (1, 0, 1) -code_block_directive.content = 1 -code_block_directive.options = {'include': directives.unchanged_required, - 'start-at': directives.unchanged_required, - 'end-at': directives.unchanged_required, - 'start-after': directives.unchanged_required, - 'end-before': directives.unchanged_required, - 'linenos': directives.unchanged, - 'linenos_offset': zero_or_positive_int, - 'tab-width': directives.unchanged, - # generic - 'stripnl' : string_bool, - 'stripall': string_bool, - 'ensurenl': string_bool, - 'tabsize' : directives.positive_int, - 'encoding': directives.encoding, - # Lua - 'func_name_hightlighting':string_bool, - 'disabled_modules': string_list, - # Python Console - 'python3': string_bool, - # Delphi - 'turbopascal':string_bool, - 'delphi' :string_bool, - 'freepascal': string_bool, - 'units': string_list, - # Modula2 - 'pim' : string_bool, - 'iso' : string_bool, - 'objm2' : string_bool, - 'gm2ext': string_bool, - # CSharp - 'unicodelevel' : csharp_unicodelevel, - # Literate haskell - 'litstyle' : lhs_litstyle, - # Raw - 'compress': raw_compress, - # Rst - 'handlecodeblocks': string_bool, - # Php - 'startinline': string_bool, - 'funcnamehighlighting': string_bool, - 'disabledmodules': string_list, - } - - - -# .. _doctutils: http://docutils.sf.net/ -# .. _pygments: http://pygments.org/ -# .. _Using Pygments in ReST documents: http://pygments.org/docs/rstdirective/ -# .. _proof of concept: -# http://article.gmane.org/gmane.text.docutils.user/3689 -# -# Test output -# ----------- -# -# If called from the command line, call the docutils publisher to render the -# input:: - -if __name__ == '__main__': - from docutils.core import publish_cmdline, default_description - from docutils.parsers.rst import directives - directives.register_directive('code-block', code_block_directive) - description = "code-block directive test output" + default_description - try: - import locale - locale.setlocale(locale.LC_ALL, '') - except Exception: - pass - publish_cmdline(writer_name='html', description=description) diff --git a/dist-packages/rst2pdf/rst2pdf/rson.py b/dist-packages/rst2pdf/rst2pdf/rson.py deleted file mode 100644 index c1be68739..000000000 --- a/dist-packages/rst2pdf/rst2pdf/rson.py +++ /dev/null @@ -1,917 +0,0 @@ - -################################################################################ -#### NOTE: THIS IS STILL IN DEVELOPMENT: #### -#### #### -#### - No encoder #### -#### - Needs more tests! 
#### -#### #### -################################################################################ - -''' -RSON -- readable serial object notation - -RSON is a superset of JSON with relaxed syntax for human readability. - -Simple usage example: - import rson - obj = rson.loads(source) - -Additional documentation available at: - -http://code.google.com/p/rson/ -''' - -__version__ = '0.08' - -__author__ = 'Patrick Maupin ' - -__copyright__ = ''' -Copyright (c) 2010, Patrick Maupin. All rights reserved. - - Permission is hereby granted, free of charge, to any person - obtaining a copy of this software and associated documentation - files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following - conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR - OTHER DEALINGS IN THE SOFTWARE. - ''' - -import bisect -import re -import sys - -class RSONDecodeError(ValueError): - pass - -class Tokenizer(list): - ''' The RSON tokenizer uses re.split() to rip the source string - apart into smaller strings which may or may not be true standalone - tokens. Sufficient information is maintained to put Humpty-Dumpty - back together when necessary. - - The tokens are kept in a reversed list. This makes retrieving - the next token a low-cost pop() operation from the end of the list, - and facilitates arbitrary lookahead operations. - - Each token is a tuple, containing the following elements: - [0] Negative character offset of token within the source string. - A negative offset is used so that the tokens are sorted - properly for bisect() for special operations. - [1] single-character string usually containing a character - which represents the token type (often the entire token). - [2] string containing entire token - [3] string (possibly null) containing whitespace after token - [4] Indentation value of line containing token - (\n followed by 0 or more spaces) - [5] Line number of token - [6] Tokenizer object that the token belongs to (for error reporting) - ''' - - # Like Python, indentation is special. I originally planned on making - # the space character the only valid indenter, but that got messy - # when combined with the desire to be 100% JSON compatible, so, like - # JSON, you can indent with any old whitespace, but if you mix and - # match, you might be in trouble (like with Python). - # - # An indentation is always the preceding EOL plus optional spaces, - # so we create a dummy EOL for the very start of the string. - # Could also have an embedded comment - indentation = r'\n[ \t\v\f]*(?:#.*)?' - - # RSON syntax delimiters are tokenized separately from everything else. 
- delimiterset = set(' { } [ ] : = , '.split()) - - re_delimiterset = ''.join(delimiterset).replace(']', r'\]') - - # Create a RE pattern for the delimiters - delimiter_pattern = '[%s]' % re_delimiterset - - # A regular quoted string must terminate before the end of the line, - # and \ can be used as the internal escape character. - quoted_string = r'"(?:[^"\n\\]|\\.)*(?:"|(?=\n))' - - # A triple-quoted string can contain any characters. The only escape - # processing that is done on them is to allow a backslash in front of - # another set of triple quotes. We only look for the start of one of - # these suckers in the first pass, and then go find the real string - # later. This keeps us from getting our knickers in a twist on - # regular strings vs triple-quoted vs comments, etc. - - triple_quoted_string = '"""' - - # Any non-whitespace, non-delimiter, group of characters is in the "other" - # category. This group can have embedded whitespace, but ends on a - # non-whitespace character. - - other = r'[\S](?:[^%s\n]*[^%s\s])*' % (re_delimiterset, re_delimiterset) - - pattern = '(%s)' % '|'.join([ - delimiter_pattern, - triple_quoted_string, - quoted_string, - other, - indentation, - ]) - - splitter = re.compile(pattern).split - - @classmethod - def factory(cls, len=len, iter=iter, unicode=unicode, isinstance=isinstance): - splitter = cls.splitter - delimiterset = set(cls.delimiterset) | set('"') - - def newstring(source, client): - self = cls() - self.client = client - - # Deal with 8 bit bytes for now - if isinstance(source, unicode): - source = source.encode('utf-8') - - # Convert MS-DOS or Mac line endings to the one true way - source = source.replace('\r\n', '\n').replace('\r', '\n') - sourcelist = splitter(source) - - # Get the indentation at the start of the file - indentation = '\n' + sourcelist[0] - linenum = 1 - linestart = offset = 0 - - # Set up to iterate over the source and add to the destination list - sourceiter = iter(sourcelist) - next = sourceiter.next - offset -= len(next()) - - # Strip comment from first line - if len(sourcelist) > 1 and sourcelist[1].startswith('#'): - i = 1 - while len(sourcelist) > i and not sourcelist[i].startswith('\n'): - i += 1 - offset -= len(next()) - - - # Preallocate the list - self.append(None) - self *= len(sourcelist) / 2 + 1 - index = 0 - - # Create all the tokens - for token in sourceiter: - whitespace = next() - t0 = token[0] - if t0 not in delimiterset: - if t0 == '\n': - linenum += 1 - indentation = token - offset -= len(token) - linestart = offset - continue - else: - t0 = 'X' - self[index] = (offset, t0, token, whitespace, indentation, linenum, self) - index += 1 - offset -= len(token) + len(whitespace) - - # Add a sentinel - self[index] = (offset, '@', '@', '', '', linenum + 1, self) - self[index+1:] = [] - - # Put everything we need in the actual object instantiation - self.reverse() - self.source = source - self.next = self.pop - self.push = self.append - return self - return newstring - - def peek(self): - return self[-1] - - def lookahead(self, index=0): - return self[-1 - index] - - @staticmethod - def sourceloc(token): - ''' Return the source location for a given token - ''' - lineno = token[5] - colno = offset = -token[0] + 1 - if lineno != 1: - colno -= token[-1].source.rfind('\n', 0, offset) + 1 - return offset, lineno, colno - - @classmethod - def error(cls, s, token): - ''' error performs generic error reporting for tokens - ''' - offset, lineno, colno = cls.sourceloc(token) - - if token[1] == '@': - loc = 'at end of 
string' - else: - text = token[2] - loc = 'line %s, column %s, text %s' % (lineno, colno, repr(text[:20])) - - err = RSONDecodeError('%s: %s' % (s, loc)) - err.pos = offset - err.lineno = lineno - err.colno = colno - raise err - - -def make_hashable(what): - try: - hash(what) - return what - except TypeError: - if isinstance(what, dict): - return tuple(sorted(make_hashable(x) for x in what.iteritems())) - return tuple(make_hashable(x) for x in what) - -class BaseObjects(object): - - # These hooks allow compatibility with simplejson - object_hook = None - object_pairs_hook = None - array_hook = None - - # Stock object constructor does not cope with no keys - disallow_missing_object_keys = True - - # Stock object constructor copes with multiple keys just fine - disallow_multiple_object_keys = False - - # Default JSON requires string keys - disallow_nonstring_keys = True - - class default_array(list): - def __new__(self, startlist, token): - return list(startlist) - - class default_object(dict): - ''' By default, RSON objects are dictionaries that - allow attribute access to their existing contents. - ''' - - def __getattr__(self, key): - return self[key] - def __setattr__(self, key, value): - self[key]=value - - def append(self, itemlist): - mydict = self - value = itemlist.pop() - itemlist = [make_hashable(x) for x in itemlist] - lastkey = itemlist.pop() - - if itemlist: - itemlist.reverse() - while itemlist: - key = itemlist.pop() - subdict = mydict.get(key) - if not isinstance(subdict, dict): - subdict = mydict[key] = type(self)() - mydict = subdict - if isinstance(value, dict): - oldvalue = mydict.get(lastkey) - if isinstance(oldvalue, dict): - oldvalue.update(value) - return - mydict[lastkey] = value - - def get_result(self, token): - return self - - def object_type_factory(self, dict=dict, tuple=tuple): - ''' This function returns constructors for RSON objects and arrays. - It handles simplejson compatible hooks as well. - ''' - object_hook = self.object_hook - object_pairs_hook = self.object_pairs_hook - - if object_pairs_hook is not None: - class build_object(list): - def get_result(self, token): - return object_pairs_hook([tuple(x) for x in self]) - self.disallow_multiple_object_keys = True - self.disallow_nonstring_keys = True - elif object_hook is not None: - mydict = dict - class build_object(list): - def get_result(self, token): - return object_hook(mydict(self)) - self.disallow_multiple_object_keys = True - self.disallow_nonstring_keys = True - else: - build_object = self.default_object - - build_array = self.array_hook or self.default_array - return build_object, build_array - - -class Dispatcher(object): - ''' Assumes that this is mixed-in to a class with an - appropriate parser_factory() method. - ''' - - @classmethod - def dispatcher_factory(cls, hasattr=hasattr, tuple=tuple, sorted=sorted): - - self = cls() - parser_factory = self.parser_factory - parsercache = {} - cached = parsercache.get - default_loads = parser_factory() - - def loads(s, **kw): - if not kw: - return default_loads(s) - - key = tuple(sorted(kw.iteritems())) - func = cached(key) - if func is None: - # Begin some real ugliness here -- just modify our instance to - # have the correct user variables for the initialization functions. - # Seems to speed up simplejson testcases a bit. 
- self.__dict__ = dict((x,y) for (x,y) in key if y is not None) - func = parsercache[key] = parser_factory() - - return func(s) - - return loads - - -class QuotedToken(object): - ''' Subclass or replace this if you don't like quoted string handling - ''' - - parse_quoted_str = staticmethod( - lambda token, s, unicode=unicode: unicode(s, 'utf-8')) - parse_encoded_chr = unichr - parse_join_str = u''.join - cachestrings = False - - quoted_splitter = re.compile(r'(\\u[0-9a-fA-F]{4}|\\.|")').split - quoted_mapper = { '\\\\' : u'\\', - r'\"' : u'"', - r'\/' : u'/', - r'\b' : u'\b', - r'\f' : u'\f', - r'\n' : u'\n', - r'\r' : u'\r', - r'\t' : u'\t'}.get - - def quoted_parse_factory(self, int=int, iter=iter, len=len): - quoted_splitter = self.quoted_splitter - quoted_mapper = self.quoted_mapper - parse_quoted_str = self.parse_quoted_str - parse_encoded_chr = self.parse_encoded_chr - parse_join_str = self.parse_join_str - cachestrings = self.cachestrings - triplequoted = self.triplequoted - - allow_double = sys.maxunicode > 65535 - - def badstring(token, special): - if token[2] != '"""' or triplequoted is None: - token[-1].error('Invalid character in quoted string: %s' % repr(special), token) - result = parse_quoted_str(token, triplequoted(token)) - if cachestrings: - result = token[-1].stringcache(result, result) - return result - - def parse(token, next): - s = token[2] - if len(s) < 2 or not (s[0] == s[-1] == '"'): - token[-1].error('No end quote on string', token) - s = quoted_splitter(s[1:-1]) - result = parse_quoted_str(token, s[0]) - if len(s) > 1: - result = [result] - append = result.append - s = iter(s) - next = s.next - next() - for special in s: - nonmatch = next() - remap = quoted_mapper(special) - if remap is None: - if len(special) == 6: - uni = int(special[2:], 16) - if 0xd800 <= uni <= 0xdbff and allow_double: - uni, nonmatch = parse_double_unicode(uni, nonmatch, next, token) - remap = parse_encoded_chr(uni) - else: - return badstring(token, special) - append(remap) - append(parse_quoted_str(token, nonmatch)) - - result = parse_join_str(result) - if cachestrings: - result = token[-1].stringcache(result, result) - return result - - - def parse_double_unicode(uni, nonmatch, next, token): - ''' Munged version of UCS-4 code pair stuff from - simplejson. - ''' - ok = True - try: - uni2 = next() - nonmatch2 = next() - except: - ok = False - ok = ok and not nonmatch and uni2.startswith(r'\u') and len(uni2) == 6 - if ok: - nonmatch = uni2 - uni = 0x10000 + (((uni - 0xd800) << 10) | (int(uni2[2:], 16) - 0xdc00)) - return uni, nonmatch2 - token[-1].error('Invalid second ch in double sequence: %s' % repr(nonmatch), token) - - return parse - - @staticmethod - def triplequoted(token): - tokens = token[-1] - source = tokens.source - result = [] - start = 3 - token[0] - while 1: - end = source.find('"""', start) - if end < 0: - tokens.error('Did not find end for triple-quoted string', token) - if source[end-1] != '\\': - break - result.append(source[start:end-1]) - result.append('"""') - start = end + 3 - result.append(source[start:end]) - offset = bisect.bisect(tokens, (- end -2, )) - tokens[offset:] = [] - return ''.join(result) - - -class UnquotedToken(object): - ''' Subclass or replace this if you don't like the unquoted - token handling. This is designed to be a superset of JSON: - - - Integers allowed to be expressed in octal, binary, or hex - as well as decimal. - - - Integers can have embedded underscores. - - - Non-match of a special token will just be wrapped as a unicode - string. 
- - - Numbers can be preceded by '+' as well s '-' - - Numbers can be left-zero-filled - - If a decimal point is present, digits are required on either side, - but not both sides - ''' - - use_decimal = False - parse_int = staticmethod( - lambda s: int(s.replace('_', ''), 0)) - parse_float = float - parse_unquoted_str = staticmethod( - lambda token, unicode=unicode: unicode(token[2], 'utf-8')) - - special_strings = dict(true = True, false = False, null = None) - - unquoted_pattern = r''' - (?: - true | false | null | # Special JSON names - (?P - [-+]? # Optional sign - (?: - 0[xX](_*[0-9a-fA-F]+)+ | # Hex integer - 0[bB](_*[01]+)+ | # binary integer - 0[oO](_*[0-7]+)+ | # Octal integer - \d+(_*\d+)* | # Decimal integer - (?P - (?: - \d+(\.\d*)? | # One or more digits, - # optional frac - \.\d+ # Leading decimal point - ) - (?:[eE][-+]?\d+)? # Optional exponent - ) - ) - ) - ) \Z # Match end of string - ''' - - def unquoted_parse_factory(self): - unquoted_match = re.compile(self.unquoted_pattern, - re.VERBOSE).match - - parse_unquoted_str = self.parse_unquoted_str - parse_float = self.parse_float - parse_int = self.parse_int - special = self.special_strings - - if self.use_decimal: - from decimal import Decimal - parse_float = Decimal - - def parse(token, next): - s = token[2] - m = unquoted_match(s) - if m is None: - return parse_unquoted_str(token) - if m.group('num') is None: - return special[s] - if m.group('float') is None: - return parse_int(s.replace('_', '')) - return parse_float(s) - - return parse - - -class EqualToken(object): - ''' Subclass or replace this if you don't like the = string handling - ''' - - encode_equals_str = None - - @staticmethod - def parse_equals(stringlist, indent, token): - ''' token probably not needed except maybe for error reporting. - Replace this with something that does what you want. 
- ''' - # Strip any trailing whitespace to the right - stringlist = [x.rstrip() for x in stringlist] - - # Strip any embedded comments - stringlist = [x for x in stringlist if x.startswith(indent) or not x] - - # Strip trailing whitespace down below - while stringlist and not stringlist[-1]: - stringlist.pop() - - # Special cases for single line - if not stringlist: - return '' - if len(stringlist) == 1: - return stringlist[0].strip() - - # Strip whitespace on first line - if stringlist and not stringlist[0]: - stringlist.pop(0) - - # Dedent all the strings to one past the equals - dedent = len(indent) - stringlist = [x[dedent:] for x in stringlist] - - # Figure out if should dedent one more - if min((not x and 500 or len(x) - len(x.lstrip())) for x in stringlist): - stringlist = [x[1:] for x in stringlist] - - # Give every line its own linefeed (keeps later parsing from - # treating this as a number, for example) - stringlist.append('') - - # Return all joined up as a single unicode string - return '\n'.join(stringlist) - - def equal_parse_factory(self, read_unquoted): - - parse_equals = self.parse_equals - encoder = self.encode_equals_str - - if encoder is None: - encoder = read_unquoted - - def parse(firsttok, next): - tokens = firsttok[-1] - indent, linenum = firsttok[4:6] - token = next() - while token[5] == linenum: - token = next() - while token[4] > indent: - token = next() - tokens.push(token) - - # Get rid of \n, and indent one past = - indent = indent[1:] + ' ' - - bigstring = tokens.source[-firsttok[0] + 1 : -token[0]] - stringlist = bigstring.split('\n') - stringlist[0] = indent + stringlist[0] - token = list(firsttok) - token[1:3] = '=', parse_equals(stringlist, indent, firsttok) - return encoder(token, next) - - return parse - - -class RsonParser(object): - ''' Parser for RSON - ''' - - disallow_trailing_commas = True - disallow_rson_sublists = False - disallow_rson_subdicts = False - - @staticmethod - def post_parse(tokens, value): - return value - - def client_info(self, parse_locals): - pass - - def parser_factory(self, len=len, type=type, isinstance=isinstance, list=list, basestring=basestring): - - Tokenizer = self.Tokenizer - tokenizer = Tokenizer.factory() - error = Tokenizer.error - - read_unquoted = self.unquoted_parse_factory() - read_quoted = self.quoted_parse_factory() - parse_equals = self.equal_parse_factory(read_unquoted) - new_object, new_array = self.object_type_factory() - disallow_trailing_commas = self.disallow_trailing_commas - disallow_missing_object_keys = self.disallow_missing_object_keys - key_handling = [disallow_missing_object_keys, self.disallow_multiple_object_keys] - disallow_nonstring_keys = self.disallow_nonstring_keys - post_parse = self.post_parse - - - def bad_array_element(token, next): - error('Expected array element', token) - - def bad_dict_key(token, next): - error('Expected dictionary key', token) - - def bad_dict_value(token, next): - error('Expected dictionary value', token) - - def bad_top_value(token, next): - error('Expected start of object', token) - - def bad_unindent(token, next): - error('Unindent does not match any outer indentation level', token) - - def bad_indent(token, next): - error('Unexpected indentation', token) - - def read_json_array(firsttok, next): - result = new_array([], firsttok) - append = result.append - while 1: - token = next() - t0 = token[1] - if t0 == ']': - if result and disallow_trailing_commas: - error('Unexpected trailing comma', token) - break - append(json_value_dispatch(t0, 
bad_array_element)(token, next)) - delim = next() - t0 = delim[1] - if t0 == ',': - continue - if t0 != ']': - if t0 == '@': - error('Unterminated list (no matching "]")', firsttok) - error('Expected "," or "]"', delim) - break - return result - - def read_json_dict(firsttok, next): - result = new_object() - append = result.append - while 1: - token = next() - t0 = token[1] - if t0 == '}': - if result and disallow_trailing_commas: - error('Unexpected trailing comma', token) - break - key = json_value_dispatch(t0, bad_dict_key)(token, next) - if disallow_nonstring_keys and not isinstance(key, basestring): - error('Non-string key %s not supported' % repr(key), token) - token = next() - t0 = token[1] - if t0 != ':': - error('Expected ":" after dict key %s' % repr(key), token) - token = next() - t0 = token[1] - value = json_value_dispatch(t0, bad_dict_value)(token, next) - append([key, value]) - delim = next() - t0 = delim[1] - if t0 == ',': - continue - if t0 != '}': - if t0 == '@': - error('Unterminated dict (no matching "}")', firsttok) - error('Expected "," or "}"', delim) - break - return result.get_result(firsttok) - - def read_rson_unquoted(firsttok, next): - toklist = [] - linenum = firsttok[5] - while 1: - token = next() - if token[5] != linenum or token[1] in ':=': - break - toklist.append(token) - firsttok[-1].push(token) - if not toklist: - return read_unquoted(firsttok, next) - s = list(firsttok[2:4]) - for tok in toklist: - s.extend(tok[2:4]) - result = list(firsttok) - result[3] = s.pop() - result[2] = ''.join(s) - return read_unquoted(result, next) - - json_value_dispatch = {'X':read_unquoted, '[':read_json_array, - '{': read_json_dict, '"':read_quoted}.get - - - rson_value_dispatch = {'X':read_rson_unquoted, '[':read_json_array, - '{': read_json_dict, '"':read_quoted, - '=': parse_equals} - - if self.disallow_rson_sublists: - rson_value_dispatch['['] = read_rson_unquoted - - if self.disallow_rson_subdicts: - rson_value_dispatch['{'] = read_rson_unquoted - - rson_key_dispatch = rson_value_dispatch.copy() - if disallow_missing_object_keys: - del rson_key_dispatch['='] - - rson_value_dispatch = rson_value_dispatch.get - rson_key_dispatch = rson_key_dispatch.get - - empty_object = new_object().get_result(None) - empty_array = new_array([], None) - empty_array_type = type(empty_array) - empties = empty_object, empty_array - - def parse_recurse_array(stack, next, token, result): - arrayindent, linenum = stack[-1][4:6] - linenum -= not result - while 1: - thisindent, newlinenum = token[4:6] - if thisindent != arrayindent: - if thisindent < arrayindent: - return result, token - if result: - stack.append(token) - lastitem = result[-1] - if lastitem == empty_array: - result[-1], token = parse_recurse_array(stack, next, token, lastitem) - elif lastitem == empty_object: - result[-1], token = parse_recurse_dict(stack, next, token, lastitem) - else: - result = None - if result: - stack.pop() - thisindent, newlinenum = token[4:6] - if thisindent <= arrayindent: - continue - bad_unindent(token, next) - bad_indent(token, next) - if newlinenum <= linenum: - if token[1] in '=:': - error('Cannot mix list elements with dict (key/value) elements', token) - error('Array elements must either be on separate lines or enclosed in []', token) - linenum = newlinenum - value = rson_value_dispatch(token[1], bad_top_value)(token, next) - result.append(value) - token = next() - - def parse_one_dict_entry(stack, next, token, entry, mydict): - arrayindent, linenum = stack[-1][4:6] - while token[1] == ':': - 
tok1 = next() - thisindent, newlinenum = tok1[4:6] - if newlinenum == linenum: - value = rson_value_dispatch(tok1[1], bad_top_value)(tok1, next) - token = next() - entry.append(value) - continue - if thisindent <= arrayindent: - error('Expected indented line after ":"', token) - token = tok1 - - if not entry: - error('Expected key', token) - - thisindent, newlinenum = token[4:6] - if newlinenum == linenum and token[1] == '=': - value = rson_value_dispatch(token[1], bad_top_value)(token, next) - entry.append(value) - token = next() - elif thisindent > arrayindent: - stack.append(token) - value, token = parse_recurse(stack, next) - if entry[-1] in empties: - if type(entry[-1]) is type(value): - entry[-1] = value - else: - error('Cannot load %s into %s' % (type(value), type(entry[-1])), stack[-1]) - elif len(value) == 1 and type(value) is empty_array_type: - entry.extend(value) - else: - entry.append(value) - stack.pop() - length = len(entry) - if length != 2 and key_handling[length > 2]: - if length < 2: - error('Expected ":" or "=", or indented line', token) - error("rson client's object handlers do not support chained objects", token) - if disallow_nonstring_keys: - for key in entry[:-1]: - if not isinstance(key, basestring): - error('Non-string key %s not supported' % repr(key), token) - mydict.append(entry) - return token - - def parse_recurse_dict(stack, next, token, result): - arrayindent = stack[-1][4] - while 1: - thisindent = token[4] - if thisindent != arrayindent: - if thisindent < arrayindent: - return result.get_result(token), token - bad_unindent(token, next) - key = rson_key_dispatch(token[1], bad_top_value)(token, next) - stack[-1] = token - token = parse_one_dict_entry(stack, next, next(), [key], result) - - def parse_recurse(stack, next, tokens=None): - ''' parse_recurse ALWAYS returns a list or a dict. - (or the user variants thereof) - It is up to the caller to determine that it was an array - of length 1 and strip the contents out of the array. - ''' - firsttok = stack[-1] - value = rson_value_dispatch(firsttok[1], bad_top_value)(firsttok, next) - - # We return an array if the next value is on a new line and either - # is at the same indentation, or the current value is an empty list - - token = next() - if (token[5] != firsttok[5] and - (token[4] <= firsttok[4] or - value in empties) and disallow_missing_object_keys): - result = new_array([value], firsttok) - if tokens is not None: - tokens.top_object = result - return parse_recurse_array(stack, next, token, result) - - # Otherwise, return a dict - result = new_object() - if tokens is not None: - tokens.top_object = result - token = parse_one_dict_entry(stack, next, token, [value], result) - return parse_recurse_dict(stack, next, token, result) - - - def parse(source): - tokens = tokenizer(source, None) - tokens.stringcache = {}.setdefault - tokens.client_info = client_info - next = tokens.next - value, token = parse_recurse([next()], next, tokens) - if token[1] != '@': - error('Unexpected additional data', token) - - # If it's a single item and we don't have a specialized - # object builder, just strip the outer list. 
- if (len(value) == 1 and isinstance(value, list) - and disallow_missing_object_keys): - value = value[0] - return post_parse(tokens, value) - - client_info = self.client_info(locals()) - - return parse - - -class RsonSystem(RsonParser, UnquotedToken, QuotedToken, EqualToken, Dispatcher, BaseObjects): - Tokenizer = Tokenizer - -loads = RsonSystem.dispatcher_factory() diff --git a/dist-packages/rst2pdf/rst2pdf/sectnumlinks.py b/dist-packages/rst2pdf/rst2pdf/sectnumlinks.py deleted file mode 100644 index 1e4269cfc..000000000 --- a/dist-packages/rst2pdf/rst2pdf/sectnumlinks.py +++ /dev/null @@ -1,19 +0,0 @@ -import docutils - -class SectNumFolder(docutils.nodes.SparseNodeVisitor): - def __init__(self, document): - docutils.nodes.SparseNodeVisitor.__init__(self, document) - self.sectnums = {} - - def visit_generated(self, node): - for i in node.parent.parent['ids']: - self.sectnums[i]=node.parent.astext().replace(u'\xa0\xa0\xa0',' ') - -class SectRefExpander(docutils.nodes.SparseNodeVisitor): - def __init__(self, document, sectnums): - docutils.nodes.SparseNodeVisitor.__init__(self, document) - self.sectnums = sectnums - - def visit_reference(self, node): - if node.get('refid', None) in self.sectnums: - node.children=[docutils.nodes.Text('%s '%self.sectnums[node.get('refid')])] diff --git a/dist-packages/rst2pdf/rst2pdf/sinker.py b/dist-packages/rst2pdf/rst2pdf/sinker.py deleted file mode 100644 index aaf510c72..000000000 --- a/dist-packages/rst2pdf/rst2pdf/sinker.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- - -from reportlab.platypus.flowables import _listWrapOn, _FUZZ, Flowable - - -class Sinker(Flowable): - '''A flowable that always takes the rest of the frame. - It then draws its contents (a list of sub-flowables) - at the bottom of that space''' - - def __init__(self, content): - self.content = content - - def wrap (self, aW, aH): - self.width, self.height = _listWrapOn(self.content, aW, None) - return self.width, aH - - def draw (self): - canv = self.canv - canv.saveState() - x = canv._x - y = canv._y - y += self.height - aW = self.width - for c in self.content: - w, h = c.wrapOn(canv, aW, 0xfffffff) - if (w < _FUZZ or h < _FUZZ) and not getattr(c, '_ZEROSIZE', None): - continue - y -= h - canv.saveState() - c.drawOn(canv, x, y, _sW=aW - w) - canv.restoreState() - canv.restoreState() diff --git a/dist-packages/rst2pdf/rst2pdf/smartypants.py b/dist-packages/rst2pdf/rst2pdf/smartypants.py deleted file mode 100644 index 68740305c..000000000 --- a/dist-packages/rst2pdf/rst2pdf/smartypants.py +++ /dev/null @@ -1,903 +0,0 @@ -#!/usr/bin/python - -r""" -============== -smartypants.py -============== - ----------------------------- -SmartyPants ported to Python ----------------------------- - -Ported by `Chad Miller`_ -Copyright (c) 2004, 2007 Chad Miller - -original `SmartyPants`_ by `John Gruber`_ -Copyright (c) 2003 John Gruber - - -Synopsis -======== - -A smart-quotes plugin for Pyblosxom_. - -The priginal "SmartyPants" is a free web publishing plug-in for Movable Type, -Blosxom, and BBEdit that easily translates plain ASCII punctuation characters -into "smart" typographic punctuation HTML entities. - -This software, *smartypants.py*, endeavours to be a functional port of -SmartyPants to Python, for use with Pyblosxom_. 
- - -Description -=========== - -SmartyPants can perform the following transformations: - -- Straight quotes ( " and ' ) into "curly" quote HTML entities -- Backticks-style quotes (\`\`like this'') into "curly" quote HTML entities -- Dashes (``--`` and ``---``) into en- and em-dash entities -- Three consecutive dots (``...`` or ``. . .``) into an ellipsis entity - -This means you can write, edit, and save your posts using plain old -ASCII straight quotes, plain dashes, and plain dots, but your published -posts (and final HTML output) will appear with smart quotes, em-dashes, -and proper ellipses. - -SmartyPants does not modify characters within ``
<pre>``, ``<code>``, ``<kbd>``,
-``<math>`` or ``<script>`` tag blocks.

# Special case for double sets of quotes, e.g.:
#   <p>He said, "'Quoted' words in a larger quote."</p>
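# Added illustration (not part of the original smartypants.py): run through
# the full smartyPants() pipeline, the nested-quote case above is expected
# to come out curly on both levels, e.g. (decoded form)
#     'He said, "\'Quoted\' words in a larger quote."'
#       -> 'He said, “‘Quoted’ words in a larger quote.”'
# which is the same behaviour test_educated_quotes exercises further down.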

- str = re.sub(r""""'(?=\w)""", """“‘""", str) - str = re.sub(r"""'"(?=\w)""", """‘“""", str) - - # Special case for decade abbreviations (the '80s): - str = re.sub(r"""\b'(?=\d{2}s)""", r"""’""", str) - - close_class = r"""[^\ \t\r\n\[\{\(\-]""" - dec_dashes = r"""–|—""" - - # Get most opening single quotes: - opening_single_quotes_regex = re.compile(r""" - ( - \s | # a whitespace char, or -   | # a non-breaking space entity, or - -- | # dashes, or - &[mn]dash; | # named dash entities - %s | # or decimal entities - &\#x201[34]; # or hex - ) - ' # the quote - (?=\w) # followed by a word character - """ % (dec_dashes,), re.VERBOSE) - str = opening_single_quotes_regex.sub(r"""\1‘""", str) - - closing_single_quotes_regex = re.compile(r""" - (%s) - ' - (?!\s | s\b | \d) - """ % (close_class,), re.VERBOSE) - str = closing_single_quotes_regex.sub(r"""\1’""", str) - - closing_single_quotes_regex = re.compile(r""" - (%s) - ' - (\s | s\b) - """ % (close_class,), re.VERBOSE) - str = closing_single_quotes_regex.sub(r"""\1’\2""", str) - - # Any remaining single quotes should be opening ones: - str = re.sub(r"""'""", r"""‘""", str) - - # Get most opening double quotes: - opening_double_quotes_regex = re.compile(r""" - ( - \s | # a whitespace char, or -   | # a non-breaking space entity, or - -- | # dashes, or - &[mn]dash; | # named dash entities - %s | # or decimal entities - &\#x201[34]; # or hex - ) - " # the quote - (?=\w) # followed by a word character - """ % (dec_dashes,), re.VERBOSE) - str = opening_double_quotes_regex.sub(r"""\1“""", str) - - # Double closing quotes: - closing_double_quotes_regex = re.compile(r""" - #(%s)? # character that indicates the quote should be closing - " - (?=\s) - """ % (close_class,), re.VERBOSE) - str = closing_double_quotes_regex.sub(r"""”""", str) - - closing_double_quotes_regex = re.compile(r""" - (%s) # character that indicates the quote should be closing - " - """ % (close_class,), re.VERBOSE) - str = closing_double_quotes_regex.sub(r"""\1”""", str) - - # Any remaining quotes should be opening ones. - str = re.sub(r'"', r"""“""", str) - - return str - - -def educateBackticks(str): - """ - Parameter: String. - Returns: The string, with ``backticks'' -style double quotes - translated into HTML curly quote entities. - Example input: ``Isn't this fun?'' - Example output: “Isn't this fun?” - """ - - str = re.sub(r"""``""", r"""“""", str) - str = re.sub(r"""''""", r"""”""", str) - return str - - -def educateSingleBackticks(str): - """ - Parameter: String. - Returns: The string, with `backticks' -style single quotes - translated into HTML curly quote entities. - - Example input: `Isn't this fun?' - Example output: ‘Isn’t this fun?’ - """ - - str = re.sub(r"""`""", r"""‘""", str) - str = re.sub(r"""'""", r"""’""", str) - return str - - -def educateDashes(str): - """ - Parameter: String. - - Returns: The string, with each instance of "--" translated to - an em-dash HTML entity. - """ - - str = re.sub(r"""---""", r"""–""", str) # en (yes, backwards) - str = re.sub(r"""--""", r"""—""", str) # em (yes, backwards) - return str - - -def educateDashesOldSchool(str): - """ - Parameter: String. - - Returns: The string, with each instance of "--" translated to - an en-dash HTML entity, and each "---" translated to - an em-dash HTML entity. - """ - - str = re.sub(r"""---""", r"""—""", str) # em (yes, backwards) - str = re.sub(r"""--""", r"""–""", str) # en (yes, backwards) - return str - - -def educateDashesOldSchoolInverted(str): - """ - Parameter: String. 
- - Returns: The string, with each instance of "--" translated to - an em-dash HTML entity, and each "---" translated to - an en-dash HTML entity. Two reasons why: First, unlike the - en- and em-dash syntax supported by - EducateDashesOldSchool(), it's compatible with existing - entries written before SmartyPants 1.1, back when "--" was - only used for em-dashes. Second, em-dashes are more - common than en-dashes, and so it sort of makes sense that - the shortcut should be shorter to type. (Thanks to Aaron - Swartz for the idea.) - """ - str = re.sub(r"""---""", r"""–""", str) # em - str = re.sub(r"""--""", r"""—""", str) # en - return str - - - -def educateEllipses(str): - """ - Parameter: String. - Returns: The string, with each instance of "..." translated to - an ellipsis HTML entity. - - Example input: Huh...? - Example output: Huh…? - """ - - str = re.sub(r"""\.\.\.""", r"""…""", str) - str = re.sub(r"""\. \. \.""", r"""…""", str) - return str - - -def stupefyEntities(str): - """ - Parameter: String. - Returns: The string, with each SmartyPants HTML entity translated to - its ASCII counterpart. - - Example input: “Hello — world.” - Example output: "Hello -- world." - """ - - str = re.sub(r"""–""", r"""-""", str) # en-dash - str = re.sub(r"""—""", r"""--""", str) # em-dash - - str = re.sub(r"""‘""", r"""'""", str) # open single quote - str = re.sub(r"""’""", r"""'""", str) # close single quote - - str = re.sub(r"""“""", r'''"''', str) # open double quote - str = re.sub(r"""”""", r'''"''', str) # close double quote - - str = re.sub(r"""…""", r"""...""", str)# ellipsis - - return str - - -def processEscapes(str): - r""" - Parameter: String. - Returns: The string, with after processing the following backslash - escape sequences. This is useful if you want to force a "dumb" - quote or other character to appear. - - Escape Value - ------ ----- - \\ \ - \" " - \' ' - \. . - \- - - \` ` - """ - str = re.sub(r"""\\\\""", r"""\""", str) - str = re.sub(r'''\\"''', r""""""", str) - str = re.sub(r"""\\'""", r"""'""", str) - str = re.sub(r"""\\\.""", r""".""", str) - str = re.sub(r"""\\-""", r"""-""", str) - str = re.sub(r"""\\`""", r"""`""", str) - - return str - - -def _tokenize(str): - """ - Parameter: String containing HTML markup. - Returns: Reference to an array of the tokens comprising the input - string. Each token is either a tag (possibly with nested, - tags contained therein, such as , or a - run of text between tags. Each element of the array is a - two-element array; the first is either 'tag' or 'text'; - the second is the actual value. - - Based on the _tokenize() subroutine from Brad Choate's MTRegex plugin. - - """ - - pos = 0 - length = len(str) - tokens = [] - - depth = 6 - nested_tags = "|".join(['(?:<(?:[^<>]',] * depth) + (')*>)' * depth) - #match = r"""(?: ) | # comments - # (?: <\? .*? 
\?> ) | # directives - # %s # nested tags """ % (nested_tags,) - tag_soup = re.compile(r"""([^<]*)(<[^>]*>)""") - - token_match = tag_soup.search(str) - - previous_end = 0 - while token_match is not None: - if token_match.group(1): - tokens.append(['text', token_match.group(1)]) - - tokens.append(['tag', token_match.group(2)]) - - previous_end = token_match.end() - token_match = tag_soup.search(str, token_match.end()) - - if previous_end < len(str): - tokens.append(['text', str[previous_end:]]) - - return tokens - - - -if __name__ == "__main__": - - import locale - - try: - locale.setlocale(locale.LC_ALL, '') - except: - pass - - from docutils.core import publish_string - docstring_html = publish_string(__doc__, writer_name='html') - - print docstring_html - - - # Unit test output goes out stderr. No worries. - import unittest - sp = smartyPants - - class TestSmartypantsAllAttributes(unittest.TestCase): - # the default attribute is "1", which means "all". - - def test_dates(self): - self.assertEqual(sp("1440-80's"), "1440-80’s") - self.assertEqual(sp("1440-'80s"), "1440-‘80s") - self.assertEqual(sp("1440---'80s"), "1440–‘80s") - self.assertEqual(sp("1960s"), "1960s") # no effect. - self.assertEqual(sp("1960's"), "1960’s") - self.assertEqual(sp("one two '60s"), "one two ‘60s") - self.assertEqual(sp("'60s"), "‘60s") - - def test_skip_tags(self): - self.assertEqual( - sp(""""""), - """""") - self.assertEqual( - sp("""

<p>He said "Let's write some code." This code here <code>if True:\n\tprint "Okay"</code> is python code.</p>

"""), - """

<p>He said “Let’s write some code.” This code here <code>if True:\n\tprint "Okay"</code> is python code.</p>

""") - - - def test_ordinal_numbers(self): - self.assertEqual(sp("21st century"), "21st century") # no effect. - self.assertEqual(sp("3rd"), "3rd") # no effect. - - def test_educated_quotes(self): - self.assertEqual(sp('''"Isn't this fun?"'''), '''“Isn’t this fun?”''') - - unittest.main() - - - - -__author__ = "Chad Miller " -__version__ = "1.5_1.6: Fri, 27 Jul 2007 07:06:40 -0400" -__url__ = "http://wiki.chad.org/SmartyPantsPy" -__description__ = "Smart-quotes, smart-ellipses, and smart-dashes for weblog entries in pyblosxom" diff --git a/dist-packages/rst2pdf/rst2pdf/sphinxnodes.py b/dist-packages/rst2pdf/rst2pdf/sphinxnodes.py deleted file mode 100644 index 4e84d79e1..000000000 --- a/dist-packages/rst2pdf/rst2pdf/sphinxnodes.py +++ /dev/null @@ -1,242 +0,0 @@ -# -*- coding: utf-8 -*- - -#$URL$ -#$Date$ -#$Revision$ - -# See LICENSE.txt for licensing terms - -''' -This module contains sphinx-specific node handlers. An import -of this module will apparently fail if sphinx.roles hasn't been -imported. - -This module creates a sphinx-specific dispatch dictionary, -which is kept separate from the regular one. - -When the SphinxHandler class is instantiated, the two dictionaries -are combined into the instantiated object. -''' - -from copy import copy - -from log import nodeid, log -from flowables import MySpacer, MyIndenter, Reference, DelayedTable, Table -from image import MyImage, VectorPdf - -from opt_imports import Paragraph, sphinx - -from nodehandlers import NodeHandler, FontHandler, HandleEmphasis -import math_flowable -from reportlab.platypus import Paragraph, TableStyle -import sphinx -import docutils - -################## NodeHandler subclasses ################### - -class SphinxHandler(NodeHandler): - sphinxmode = True - dispatchdict = {} - - def __init__(self): - ''' This is where the magic happens. Make a copy of the elements - in the non-sphinx dispatch dictionary, setting sphinxmode on - every element, and then overwrite that dictionary with any - sphinx-specific handlers. 
- ''' - mydict = {} - for key, value in self._baseclass.dispatchdict.iteritems(): - value = copy(value) - value.sphinxmode = True - mydict[key] = value - mydict.update(self.dispatchdict) - self.dispatchdict = mydict - - -class SphinxFont(SphinxHandler, FontHandler): - pass - -class HandleSphinxDefaults(SphinxHandler, sphinx.addnodes.glossary, - sphinx.addnodes.start_of_file, - sphinx.addnodes.compact_paragraph, - sphinx.addnodes.pending_xref): - pass - -class SphinxListHandler(SphinxHandler): - def get_text(self, client, node, replaceEnt): - t = client.gather_pdftext(node) - while t and t[0] in ', ': - t=t[1:] - return t - -class HandleSphinxDescAddname(SphinxFont, sphinx.addnodes.desc_addname): - fontstyle = "descclassname" - -class HandleSphinxDescName(SphinxFont, sphinx.addnodes.desc_name): - fontstyle = "descname" - -class HandleSphinxDescReturn(SphinxFont, sphinx.addnodes.desc_returns): - def get_font_prefix(self, client, node, replaceEnt): - return ' → ' + client.styleToFont("returns") - -class HandleSphinxDescType(SphinxFont, sphinx.addnodes.desc_type): - fontstyle = "desctype" - -class HandleSphinxDescParamList(SphinxListHandler, sphinx.addnodes.desc_parameterlist): - pre=' (' - post=')' - -class HandleSphinxDescParam(SphinxFont, sphinx.addnodes.desc_parameter): - fontstyle = "descparameter" - def get_pre_post(self, client, node, replaceEnt): - pre, post = FontHandler.get_pre_post(self, client, node, replaceEnt) - if node.hasattr('noemph'): - pre = ', ' + pre - else: - pre = ', ' + pre - post += '' - return pre, post - -class HandleSphinxDescOpt(SphinxListHandler, SphinxFont, sphinx.addnodes.desc_optional): - fontstyle = "optional" - def get_pre_post(self, client, node, replaceEnt): - prepost = FontHandler.get_pre_post(self, client, node, replaceEnt) - return '%s[%s, ' % prepost, '%s]%s' % prepost - -class HandleDescAnnotation(SphinxHandler, HandleEmphasis, sphinx.addnodes.desc_annotation): - pass - -class HandleSphinxIndex(SphinxHandler, sphinx.addnodes.index): - def gather_elements(self, client, node, style): - try: - for entry in node['entries']: - client.pending_targets.append(docutils.nodes.make_id(entry[2])) - except IndexError: - if node['entries']: - log.error("Can't process index entry: %s [%s]", - node['entries'], nodeid(node)) - return [] - -if sphinx.__version__ < '1.0': - class HandleSphinxModule(SphinxHandler, sphinx.addnodes.module): - def gather_elements(self, client, node, style): - return [Reference('module-'+node['modname'])] - -# custom SPHINX nodes. -# FIXME: make sure they are all here, and keep them all together - -class HandleSphinxCentered(SphinxHandler, sphinx.addnodes.centered): - def gather_elements(self, client, node, style): - return [Paragraph(client.gather_pdftext(node), - client.styles['centered'])] - -class HandleSphinxDesc(SphinxHandler, sphinx.addnodes.desc): - def gather_elements(self, client, node, style): - st=client.styles[node['desctype']] - if st==client.styles['normal']: - st=copy(client.styles['desc']) - st.spaceBefore=0 - pre=[MySpacer(0,client.styles['desc'].spaceBefore)] - return pre + client.gather_elements(node, st) - -class HandleSphinxDescSignature(SphinxHandler, sphinx.addnodes.desc_signature): - def gather_elements(self, client, node, style): - # Need to add ids as targets, found this when using one of the - # django docs extensions - targets=[i.replace(' ','') for i in node['ids']] - pre='' - for i in targets: - if i not in client.targets: - pre+='
'% i - client.targets.append(i) - return [Paragraph(pre+client.gather_pdftext(node),style)] - -class HandleSphinxDescContent(SphinxHandler, sphinx.addnodes.desc_content): - def gather_elements(self, client, node, style): - return [MyIndenter(left=10)] +\ - client.gather_elements(node, client.styles["definition"]) +\ - [MyIndenter(left=-10)] - -class HandleHList(SphinxHandler, sphinx.addnodes.hlist): - def gather_elements(self, client, node, style): - # Each child is a hlistcol and represents a column. - # Each grandchild is a bullet list that's the contents - # of the column - - # Represent it as a N-column, 1-row table, each cell containing - # a list. - - cells = [[ client.gather_elements(child, style) for child in node.children]] - t_style=TableStyle(client.styles['hlist'].commands) - cw=100./len(node.children) - return [ DelayedTable( cells, - colWidths=["%s%%"%cw,]*len(cells), - style=t_style - )] - -from sphinx.ext import mathbase - -class HandleHighlightLang(SphinxHandler, sphinx.addnodes.highlightlang): - pass - -class HandleSphinxMath(SphinxHandler, mathbase.math, mathbase.displaymath): - def gather_elements(self, client, node, style): - mflow=math_flowable.Math(node.get('latex',''),node.get('label',None)) - n=node['number'] - if n is not None: - number='(%s)'%node['number'] - return [Table([[mflow,number]],)] - return [mflow] - - def get_text(self, client, node, replaceEnt): - mf = math_flowable.Math(node.get('latex','')) - w, h = mf.wrap(0, 0) - descent = mf.descent() - img = mf.genImage() - client.to_unlink.append(img) - return '' % ( - img, w, h, -descent) - -class HandleSphinxEq(SphinxHandler, mathbase.eqref): - - def get_text(self, client, node, replaceEnt): - return '%s'%(node['target'], - client.styles.linkColor, node.astext()) - -graphviz_warn = False - -try: - x=sphinx.ext.graphviz.graphviz - class HandleSphinxGraphviz(SphinxHandler, sphinx.ext.graphviz.graphviz): - def gather_elements(self, client, node, style): - # Based on the graphviz extension - global graphviz_warn - try: - # Is vectorpdf enabled? - if hasattr(VectorPdf,'load_xobj'): - # Yes, we have vectorpdf - fname, outfn = sphinx.ext.graphviz.render_dot(node['builder'], node['code'], node['options'], 'pdf') - else: - # Use bitmap - if not graphviz_warn: - log.warning('Using graphviz with PNG output. 
You get much better results if you enable the vectorpdf extension.') - graphviz_warn = True - fname, outfn = sphinx.ext.graphviz.render_dot(node['builder'], node['code'], node['options'], 'png') - if outfn: - client.to_unlink.append(outfn) - client.to_unlink.append(outfn+'.map') - else: - # Something went very wrong with graphviz, and - # sphinx should have given an error already - return [] - except sphinx.ext.graphviz.GraphvizError, exc: - log.error('dot code %r: ' % node['code'] + str(exc)) - return [Paragraph(node['code'],client.styles['code'])] - return [MyImage(filename=outfn, client=client)] -except AttributeError: - # Probably the graphviz extension is not enabled - pass - - - -sphinxhandlers = SphinxHandler() diff --git a/dist-packages/rst2pdf/rst2pdf/styles.py b/dist-packages/rst2pdf/rst2pdf/styles.py deleted file mode 100644 index ebc617784..000000000 --- a/dist-packages/rst2pdf/rst2pdf/styles.py +++ /dev/null @@ -1,974 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms -#$URL$ -#$Date$ -#$Revision$ - -import os -import sys -import re -from copy import copy -from types import * -from os.path import abspath, dirname, expanduser, join - -import docutils.nodes - -import reportlab -from reportlab.platypus import * -import reportlab.lib.colors as colors -import reportlab.lib.units as units -from reportlab.pdfbase.ttfonts import TTFont -from reportlab.lib.fonts import addMapping -from reportlab.lib.styles import * -from reportlab.lib.enums import * -from reportlab.pdfbase import pdfmetrics -import reportlab.lib.pagesizes as pagesizes -import reportlab.rl_config - -from rst2pdf.rson import loads as rson_loads - -import findfonts -from log import log - -from opt_imports import ParagraphStyle, wordaxe, wordaxe_version - -HAS_WORDAXE = wordaxe is not None - -unit_separator = re.compile('(-?[0-9\.]*)') - - -class StyleSheet(object): - '''Class to handle a collection of stylesheets''' - - @staticmethod - def stylepairs(data): - ''' Allows pairs of style information to be expressed - in canonical reportlab list of two-item list/tuple, - or in a more human-readable dictionary. - ''' - styles = data.get('styles', {}) - try: - stylenames = styles.keys() - except AttributeError: - for style in styles: - yield style - return - - # Traditional reportlab styles are in ordered (key, value) - # tuples. We also support dictionary lookup. This is not - # necessarily ordered. - - # The only problem with dictionary lookup is that - # we need to insure that parents are processed before - # their children. This loop is a little ugly, but - # gets the job done. - - while stylenames: - name = stylenames.pop() - parent = styles[name].get('parent') - if parent not in stylenames: - yield name, styles[name] - continue - names = [name] - while parent in stylenames: - stylenames.remove(parent) - names.append(parent) - parent = styles[names[-1]].get('parent') - while names: - name = names.pop() - yield name, styles[name] - - def __init__(self, flist, font_path=None, style_path=None, def_dpi=300): - log.info('Using stylesheets: %s' % ','.join(flist)) - # find base path - if hasattr(sys, 'frozen'): - self.PATH = abspath(dirname(sys.executable)) - else: - self.PATH = abspath(dirname(__file__)) - - # flist is a list of stylesheet filenames. - # They will be loaded and merged in order. 
- # but the two default stylesheets will always - # be loaded first - flist = [join(self.PATH, 'styles', 'styles.style'), - join(self.PATH, 'styles', 'default.style')] + flist - - self.def_dpi=def_dpi - if font_path is None: - font_path=[] - font_path+=['.', os.path.join(self.PATH, 'fonts')] - self.FontSearchPath = map(os.path.expanduser, font_path) - - if style_path is None: - style_path=[] - style_path+=['.', os.path.join(self.PATH, 'styles'), - '~/.rst2pdf/styles'] - self.StyleSearchPath = map(os.path.expanduser, style_path) - self.FontSearchPath=list(set(self.FontSearchPath)) - self.StyleSearchPath=list(set(self.StyleSearchPath)) - - log.info('FontPath:%s'%self.FontSearchPath) - log.info('StylePath:%s'%self.StyleSearchPath) - - findfonts.flist = self.FontSearchPath - # Page width, height - self.pw = 0 - self.ph = 0 - - # Page size [w,h] - self.ps = None - - # Margins (top,bottom,left,right,gutter) - self.tm = 0 - self.bm = 0 - self.lm = 0 - self.rm = 0 - self.gm = 0 - - #text width - self.tw = 0 - - # Default emsize, later it will be the fontSize of the base style - self.emsize=10 - - self.languages = [] - - ssdata = self.readSheets(flist) - - # Get pageSetup data from all stylessheets in order: - self.ps = pagesizes.A4 - self.page={} - for data, ssname in ssdata: - page = data.get('pageSetup', {}) - if page: - self.page.update(page) - pgs=page.get('size', None) - if pgs: # A standard size - pgs=pgs.upper() - if pgs in pagesizes.__dict__: - self.ps = list(pagesizes.__dict__[pgs]) - self.psname = pgs - if 'width' in self.page: del(self.page['width']) - if 'height' in self.page: del(self.page['height']) - elif pgs.endswith('-LANDSCAPE'): - self.psname = pgs.split('-')[0] - self.ps = list(pagesizes.landscape(pagesizes.__dict__[self.psname])) - if 'width' in self.page: del(self.page['width']) - if 'height' in self.page: del(self.page['height']) - else: - log.critical('Unknown page size %s in stylesheet %s'%\ - (page['size'], ssname)) - continue - else: #A custom size - if 'size'in self.page: - del(self.page['size']) - # The sizes are expressed in some unit. - # For example, 2cm is 2 centimeters, and we need - # to do 2*cm (cm comes from reportlab.lib.units) - if 'width' in page: - self.ps[0] = self.adjustUnits(page['width']) - if 'height' in page: - self.ps[1] = self.adjustUnits(page['height']) - self.pw, self.ph = self.ps - if 'margin-left' in page: - self.lm = self.adjustUnits(page['margin-left']) - if 'margin-right' in page: - self.rm = self.adjustUnits(page['margin-right']) - if 'margin-top' in page: - self.tm = self.adjustUnits(page['margin-top']) - if 'margin-bottom' in page: - self.bm = self.adjustUnits(page['margin-bottom']) - if 'margin-gutter' in page: - self.gm = self.adjustUnits(page['margin-gutter']) - if 'spacing-header' in page: - self.ts = self.adjustUnits(page['spacing-header']) - if 'spacing-footer' in page: - self.bs = self.adjustUnits(page['spacing-footer']) - if 'firstTemplate' in page: - self.firstTemplate = page['firstTemplate'] - - # tw is the text width. - # We need it to calculate header-footer height - # and compress literal blocks. - self.tw = self.pw - self.lm - self.rm - self.gm - - # Get page templates from all stylesheets - self.pageTemplates = {} - for data, ssname in ssdata: - templates = data.get('pageTemplates', {}) - # templates is a dictionary of pageTemplates - for key in templates: - template = templates[key] - # template is a dict. 
- # template[´frames'] is a list of frames - if key in self.pageTemplates: - self.pageTemplates[key].update(template) - else: - self.pageTemplates[key] = template - - # Get font aliases from all stylesheets in order - self.fontsAlias = {} - for data, ssname in ssdata: - self.fontsAlias.update(data.get('fontsAlias', {})) - - embedded_fontnames = [] - self.embedded = [] - # Embed all fonts indicated in all stylesheets - for data, ssname in ssdata: - embedded = data.get('embeddedFonts', []) - - for font in embedded: - try: - # Just a font name, try to embed it - if isinstance(font, unicode): - # See if we can find the font - fname, pos = findfonts.guessFont(font) - if font in embedded_fontnames: - pass - else: - fontList = findfonts.autoEmbed(font) - if fontList: - embedded_fontnames.append(font) - if not fontList: - if (fname, pos) in embedded_fontnames: - fontList = None - else: - fontList = findfonts.autoEmbed(fname) - if fontList is not None: - self.embedded += fontList - # Maybe the font we got is not called - # the same as the one we gave - # so check that out - suff = ["", "-Oblique", "-Bold", "-BoldOblique"] - if not fontList[0].startswith(font): - # We need to create font aliases, and use them - for fname, aliasname in zip( - fontList, - [font + suffix for suffix in suff]): - self.fontsAlias[aliasname] = fname - continue - - # Each "font" is a list of four files, which will be - # used for regular / bold / italic / bold+italic - # versions of the font. - # If your font doesn't have one of them, just repeat - # the regular font. - - # Example, using the Tuffy font from - # http://tulrich.com/fonts/ - # "embeddedFonts" : [ - # ["Tuffy.ttf", - # "Tuffy_Bold.ttf", - # "Tuffy_Italic.ttf", - # "Tuffy_Bold_Italic.ttf"] - # ], - - # The fonts will be registered with the file name, - # minus the extension. - - if font[0].lower().endswith('.ttf'): # A True Type font - for variant in font: - location=self.findFont(variant) - pdfmetrics.registerFont( - TTFont(str(variant.split('.')[0]), - location)) - log.info('Registering font: %s from %s'%\ - (str(variant.split('.')[0]),location)) - self.embedded.append(str(variant.split('.')[0])) - - # And map them all together - regular, bold, italic, bolditalic = [ - variant.split('.')[0] for variant in font] - addMapping(regular, 0, 0, regular) - addMapping(regular, 0, 1, italic) - addMapping(regular, 1, 0, bold) - addMapping(regular, 1, 1, bolditalic) - else: # A Type 1 font - # For type 1 fonts we require - # [FontName,regular,italic,bold,bolditalic] - # where each variant is a (pfbfile,afmfile) pair. - # For example, for the URW palladio from TeX: - # ["Palatino",("uplr8a.pfb","uplr8a.afm"), - # ("uplri8a.pfb","uplri8a.afm"), - # ("uplb8a.pfb","uplb8a.afm"), - # ("uplbi8a.pfb","uplbi8a.afm")] - faceName = font[0] - regular = pdfmetrics.EmbeddedType1Face(*font[1]) - italic = pdfmetrics.EmbeddedType1Face(*font[2]) - bold = pdfmetrics.EmbeddedType1Face(*font[3]) - bolditalic = pdfmetrics.EmbeddedType1Face(*font[4]) - - except Exception, e: - try: - if isinstance(font, list): - fname = font[0] - else: - fname = font - log.error("Error processing font %s: %s", - os.path.splitext(fname)[0], str(e)) - log.error("Registering %s as Helvetica alias", fname) - self.fontsAlias[fname] = 'Helvetica' - except Exception, e: - log.critical("Error processing font %s: %s", - fname, str(e)) - continue - - # Go though all styles in all stylesheets and find all fontNames. 
- # Then decide what to do with them - for data, ssname in ssdata: - for [skey, style] in self.stylepairs(data): - for key in style: - if key == 'fontName' or key.endswith('FontName'): - # It's an alias, replace it - if style[key] in self.fontsAlias: - style[key] = self.fontsAlias[style[key]] - # Embedded already, nothing to do - if style[key] in self.embedded: - continue - # Standard font, nothing to do - if style[key] in ( - "Courier", - "Courier-Bold", - "Courier-BoldOblique", - "Courier-Oblique", - "Helvetica", - "Helvetica-Bold", - "Helvetica-BoldOblique", - "Helvetica-Oblique", - "Symbol", - "Times-Bold", - "Times-BoldItalic", - "Times-Italic", - "Times-Roman", - "ZapfDingbats"): - continue - # Now we need to do something - # See if we can find the font - fname, pos = findfonts.guessFont(style[key]) - - if style[key] in embedded_fontnames: - pass - else: - fontList = findfonts.autoEmbed(style[key]) - if fontList: - embedded_fontnames.append(style[key]) - if not fontList: - if (fname, pos) in embedded_fontnames: - fontList = None - else: - fontList = findfonts.autoEmbed(fname) - if fontList: - embedded_fontnames.append((fname, pos)) - if fontList: - self.embedded += fontList - # Maybe the font we got is not called - # the same as the one we gave so check that out - suff = ["", "-Bold", "-Oblique", "-BoldOblique"] - if not fontList[0].startswith(style[key]): - # We need to create font aliases, and use them - basefname=style[key].split('-')[0] - for fname, aliasname in zip( - fontList, - [basefname + suffix for - suffix in suff]): - self.fontsAlias[aliasname] = fname - style[key] = self.fontsAlias[basefname +\ - suff[pos]] - else: - log.error("Unknown font: \"%s\"," - "replacing with Helvetica", style[key]) - style[key] = "Helvetica" - - #log.info('FontList: %s'%self.embedded) - #log.info('FontAlias: %s'%self.fontsAlias) - # Get styles from all stylesheets in order - self.stylesheet = {} - self.styles = [] - self.linkColor = 'navy' - # FIXME: linkColor should probably not be a global - # style, and tocColor should probably not - # be a special case, but for now I'm going - # with the flow... 
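# Added illustration (not part of the original styles.py): the loop below
# consumes per-stylesheet data of roughly this shape (RSON/JSON); the style
# name "note-title", its parent "heading" and the concrete values are
# made-up examples, not defaults:
#
#   {
#     "linkColor": "navy",
#     "styles": {
#       "note-title": {
#         "parent": "heading",
#         "fontSize": "150%",        # relative to the parent style
#         "alignment": "TA_CENTER"
#       }
#     }
#   }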
- self.tocColor = None - for data, ssname in ssdata: - self.linkColor = data.get('linkColor') or self.linkColor - self.tocColor = data.get('tocColor') or self.tocColor - for [skey, style] in self.stylepairs(data): - sdict = {} - # FIXME: this is done completely backwards - for key in style: - # Handle color references by name - if key == 'color' or key.endswith('Color') and style[key]: - style[key] = formatColor(style[key]) - - # Yet another workaround for the unicode bug in - # reportlab's toColor - elif key == 'commands': - style[key]=validateCommands(style[key]) - #for command in style[key]: - #c=command[0].upper() - #if c=='ROWBACKGROUNDS': - #command[3]=[str(c) for c in command[3]] - #elif c in ['BOX','INNERGRID'] or c.startswith('LINE'): - #command[4]=str(command[4]) - - # Handle alignment constants - elif key == 'alignment': - style[key] = dict(TA_LEFT=0, - LEFT=0, - TA_CENTER=1, - CENTER=1, - TA_CENTRE=1, - CENTRE=1, - TA_RIGHT=2, - RIGHT=2, - TA_JUSTIFY=4, - JUSTIFY=4, - DECIMAL=8, )[style[key].upper()] - - elif key == 'language': - if not style[key] in self.languages: - self.languages.append(style[key]) - - # Make keys str instead of unicode (required by reportlab) - sdict[str(key)] = style[key] - sdict['name'] = skey - # If the style already exists, update it - if skey in self.stylesheet: - self.stylesheet[skey].update(sdict) - else: # New style - self.stylesheet[skey] = sdict - self.styles.append(sdict) - - # If the stylesheet has a style name docutils won't reach - # make a copy with a sanitized name. - # This may make name collisions possible but that should be - # rare (who would have custom_name and custom-name in the - # same stylesheet? ;-) - # Issue 339 - - styles2=[] - for s in self.styles: - if not re.match("^[a-z](-?[a-z0-9]+)*$", s['name']): - s2 = copy(s) - s2['name'] = docutils.nodes.make_id(s['name']) - log.warning('%s is an invalid docutils class name, adding alias %s'%(s['name'], s2['name'])) - styles2.append(s2) - self.styles.extend(styles2) - - # And create reportlabs stylesheet - self.StyleSheet = StyleSheet1() - # Patch to make the code compatible with reportlab from SVN 2.4+ and - # 2.4 - if not hasattr(self.StyleSheet, 'has_key'): - self.StyleSheet.__class__.has_key = lambda s, k : k in s - for s in self.styles: - if 'parent' in s: - if s['parent'] is None: - if s['name'] != 'base': - s['parent'] = self.StyleSheet['base'] - else: - del(s['parent']) - else: - s['parent'] = self.StyleSheet[s['parent']] - else: - if s['name'] != 'base': - s['parent'] = self.StyleSheet['base'] - - # If the style has no bulletFontName but it has a fontName, set it - if ('bulletFontName' not in s) and ('fontName' in s): - s['bulletFontName'] = s['fontName'] - - hasFS = True - # Adjust fontsize units - if 'fontSize' not in s: - s['fontSize'] = s['parent'].fontSize - s['trueFontSize']=None - hasFS = False - elif 'parent' in s: - # This means you can set the fontSize to - # "2cm" or to "150%" which will be calculated - # relative to the parent style - s['fontSize'] = self.adjustUnits(s['fontSize'], - s['parent'].fontSize) - s['trueFontSize']=s['fontSize'] - else: - # If s has no parent, it's base, which has - # an explicit point size by default and % - # makes no sense, but guess it as % of 10pt - s['fontSize'] = self.adjustUnits(s['fontSize'], 10) - - # If the leading is not set, but the size is, set it - if 'leading' not in s and hasFS: - s['leading'] = 1.2*s['fontSize'] - - # If the bullet font size is not set, set it as fontSize - if ('bulletFontSize' not in s) and ('fontSize' 
in s): - s['bulletFontSize'] = s['fontSize'] - - # If the borderPadding is a list and wordaxe <=0.3.2, - # convert it to an integer. Workaround for Issue - if 'borderPadding' in s and ((HAS_WORDAXE and \ - wordaxe_version <='wordaxe 0.3.2') or - reportlab.Version < "2.3" )\ - and isinstance(s['borderPadding'], list): - log.warning('Using a borderPadding list in '\ - 'style %s with wordaxe <= 0.3.2 or Reportlab < 2.3. That is not '\ - 'supported, so it will probably look wrong'%s['name']) - s['borderPadding']=s['borderPadding'][0] - - self.StyleSheet.add(ParagraphStyle(**s)) - - - self.emsize=self['base'].fontSize - # Make stdFont the basefont, for Issue 65 - reportlab.rl_config.canvas_basefontname = self['base'].fontName - # Make stdFont the default font for table cell styles (Issue 65) - reportlab.platypus.tables.CellStyle.fontname=self['base'].fontName - - - def __getitem__(self, key): - - # This 'normalizes' the key. - # For example, if the key is todo_node (like sphinx uses), it will be - # converted to 'todo-node' which is a valid docutils class name. - - if not re.match("^[a-z](-?[a-z0-9]+)*$", key): - key = docutils.nodes.make_id(key) - - if self.StyleSheet.has_key(key): - return self.StyleSheet[key] - else: - if key.startswith('pygments'): - log.info("Using undefined style '%s'" - ", aliased to style 'code'."%key) - newst=copy(self.StyleSheet['code']) - else: - log.warning("Using undefined style '%s'" - ", aliased to style 'normal'."%key) - newst=copy(self.StyleSheet['normal']) - newst.name=key - self.StyleSheet.add(newst) - return newst - - def readSheets(self, flist): - ''' Read in the stylesheets. Return a list of - (sheetdata, sheetname) tuples. - - Orders included sheets in front - of including sheets. - ''' - # Process from end of flist - flist.reverse() - # Keep previously seen sheets in sheetdict - sheetdict = {} - result = [] - - while flist: - ssname = flist.pop() - data = sheetdict.get(ssname) - if data is None: - data = self.readStyle(ssname) - if data is None: - continue - sheetdict[ssname] = data - if 'options' in data and 'stylesheets' in data['options']: - flist.append(ssname) - newsheets = list(data['options']['stylesheets']) - newsheets.reverse() - flist.extend(newsheets) - continue - result.append((data, ssname)) - return result - - def readStyle(self, ssname): - # If callables are used, they should probably be subclassed - # strings, or something else that will print nicely for errors - if callable(ssname): - return ssname() - - fname = self.findStyle(ssname) - if fname: - try: - return rson_loads(open(fname).read()) - except ValueError, e: # Error parsing the JSON data - log.critical('Error parsing stylesheet "%s": %s'%\ - (fname, str(e))) - except IOError, e: #Error opening the ssheet - log.critical('Error opening stylesheet "%s": %s'%\ - (fname, str(e))) - - def findStyle(self, fn): - """Find the absolute file name for a given style filename. - - Given a style filename, searches for it in StyleSearchPath - and returns the real file name. - - """ - - def innerFind(path, fn): - if os.path.isabs(fn): - if os.path.isfile(fn): - return fn - else: - for D in path: - tfn = os.path.join(D, fn) - if os.path.isfile(tfn): - return tfn - return None - for ext in ['', '.style', '.json']: - result = innerFind(self.StyleSearchPath, fn+ext) - if result: - break - if result is None: - log.warning("Can't find stylesheet %s"%fn) - return result - - def findFont(self, fn): - """Find the absolute font name for a given font filename. 
- - Given a font filename, searches for it in FontSearchPath - and returns the real file name. - - """ - if not os.path.isabs(fn): - for D in self.FontSearchPath: - tfn = os.path.join(D, fn) - if os.path.isfile(tfn): - return str(tfn) - return str(fn) - - def styleForNode(self, node): - """Return the right default style for any kind of node. - - That usually means "bodytext", but for sidebars, for - example, it's sidebar. - - """ - n= docutils.nodes - styles={n.sidebar: 'sidebar', - n.figure: 'figure', - n.tgroup: 'table', - n.table: 'table', - n.Admonition: 'admonition' - } - - return self[styles.get(node.__class__, 'bodytext')] - - def tstyleHead(self, rows=1): - """Return a table style spec for a table header of `rows`. - - The style will be based on the table-heading style from the stylesheet. - - """ - # This alignment thing is exactly backwards from - # the alignment for paragraphstyles - alignment = {0: 'LEFT', 1: 'CENTER', 1: 'CENTRE', 2: 'RIGHT', - 4: 'JUSTIFY', 8: 'DECIMAL'}[self['table-heading'].alignment] - return [ - ('BACKGROUND', - (0, 0), - (-1, rows - 1), - self['table-heading'].backColor), - ('ALIGN', - (0, 0), - (-1, rows - 1), - alignment), - ('TEXTCOLOR', - (0, 0), - (-1, rows - 1), - self['table-heading'].textColor), - ('FONT', - (0, 0), - (-1, rows - 1), - self['table-heading'].fontName, - self['table-heading'].fontSize, - self['table-heading'].leading), - ('VALIGN', - (0, 0), - (-1, rows - 1), - self['table-heading'].valign)] - - def adjustFieldStyle(self): - """Merges fieldname and fieldvalue styles into the field table style""" - tstyle=self.tstyles['field'] - extras=self.pStyleToTStyle(self['fieldname'], 0, 0)+\ - self.pStyleToTStyle(self['fieldvalue'], 1, 0) - for e in extras: - tstyle.add(*e) - return tstyle - - def pStyleToTStyle(self, style, x, y): - """Return a table style similar to a given paragraph style. - - Given a reportlab paragraph style, returns a spec for a table style - that adopts some of its features (for example, the background color). - - """ - results = [] - if style.backColor: - results.append(('BACKGROUND', (x, y), (x, y), style.backColor)) - if style.borderWidth: - bw = style.borderWidth - del style.__dict__['borderWidth'] - if style.borderColor: - bc = style.borderColor - del style.__dict__['borderColor'] - else: - bc = colors.black - bc=str(bc) - results.append(('BOX', (x, y), (x, y), bw, bc)) - if style.borderPadding: - if isinstance(style.borderPadding, list): - results.append(('TOPPADDING', - (x, y), - (x, y), - style.borderPadding[0])) - results.append(('RIGHTPADDING', - (x, y), - (x, y), - style.borderPadding[1])) - results.append(('BOTTOMPADDING', - (x, y), - (x, y), - style.borderPadding[2])) - results.append(('LEFTPADDING', - (x, y), - (x, y), - style.borderPadding[3])) - else: - results.append(('TOPPADDING', - (x, y), - (x, y), - style.borderPadding)) - results.append(('RIGHTPADDING', - (x, y), - (x, y), - style.borderPadding)) - results.append(('BOTTOMPADDING', - (x, y), - (x, y), - style.borderPadding)) - results.append(('LEFTPADDING', - (x, y), - (x, y), - style.borderPadding)) - return results - - def adjustUnits(self, v, total=None, default_unit='pt'): - if total is None: - total = self.tw - return adjustUnits(v, total, - self.def_dpi, - default_unit, - emsize=self.emsize) - - def combinedStyle(self, styles): - '''Given a list of style names, it merges them (the existing ones) - and returns a new style. - - The styles that don't exist are silently ignored. 
- - For example, if called with styles=['style1','style2'] the returned - style will be called 'merged_style1_style2'. - - The styles that are *later* in the list will have priority. - ''' - - validst = [x for x in styles if self.StyleSheet.has_key(x)] - newname = '_'.join(['merged']+validst) - validst = [self[x] for x in validst] - newst=copy(validst[0]) - - for st in validst[1:]: - newst.__dict__.update(st.__dict__) - - newst.name=newname - return newst - - -def adjustUnits(v, total=None, dpi=300, default_unit='pt', emsize=10): - """Takes something like 2cm and returns 2*cm. - - If you use % as a unit, it returns the percentage of "total". - - If total is not given, returns a percentage of the page width. - However, if you get to that stage, you are doing it wrong. - - Example:: - - >>> adjustUnits('50%',200) - 100 - - """ - - if v is None or v=="": - return None - - v = str(v) - l = re.split('(-?[0-9\.]*)', v) - n=l[1] - u=default_unit - if len(l) == 3 and l[2]: - u=l[2] - if u in units.__dict__: - return float(n) * units.__dict__[u] - else: - if u == '%': - return float(n) * total/100 - elif u=='px': - return float(n) * units.inch / dpi - elif u=='pt': - return float(n) - elif u=='in': - return float(n) * units.inch - elif u=='em': - return float(n) * emsize - elif u=='ex': - return float(n) * emsize /2 - elif u=='pc': # picas! - return float(n) * 12 - log.error('Unknown unit "%s"' % u) - return float(n) - - -def formatColor(value, numeric=True): - """Convert a color like "gray" or "0xf" or "ffff" - to something ReportLab will like.""" - if value in colors.__dict__: - return colors.__dict__[value] - else: # Hopefully, a hex color: - c = value.strip() - if c[0] == '#': - c = c[1:] - while len(c) < 6: - c = '0' + c - if numeric: - r = int(c[:2], 16)/255. - g = int(c[2:4], 16)/255. - b = int(c[4:6], 16)/255. - if len(c) >= 8: - alpha = int(c[6:8], 16)/255. - return colors.Color(r, g, b, alpha=alpha) - return colors.Color(r, g, b) - else: - return str("#"+c) - -# The values are: -# * Minimum number of arguments -# * Maximum number of arguments -# * Valid types of arguments. -# -# For example, if option FOO takes a list a string and a number, -# but the number is optional: -# -# "FOO":(2,3,"list","string","number") -# -# The reportlab command could look like -# -# ["FOO",(0,0),(-1,-1),[1,2],"whatever",4] -# -# THe (0,0) (-1,-1) are start and stop and are mandatory. -# -# Possible types of arguments are string, number, color, colorlist - - -validCommands={ - # Cell format commands - "FONT":(1,3,"string","number","number"), - "FONTNAME":(1,1,"string"), - "FACE":(1,1,"string"), - "FONTSIZE":(1,1,"number"), - "SIZE":(1,1,"number"), - "LEADING":(1,1,"number"), - "TEXTCOLOR":(1,1,"color"), - "ALIGNMENT":(1,1,"string"), - "ALIGN":(1,1,"string"), - "LEFTPADDING":(1,1,"number"), - "RIGHTPADDING":(1,1,"number"), - "TOPPADDING":(1,1,"number"), - "BOTTOMPADDING":(1,1,"number"), - "BACKGROUND":(1,1,"color"), - "ROWBACKGROUNDS":(1,1,"colorlist"), - "COLBACKGROUNDS":(1,1,"colorlist"), - "VALIGN":(1,1,"string"), - # Line commands - "GRID":(2,2,"number","color"), - "BOX":(2,2,"number","color"), - "OUTLINE":(2,2,"number","color"), - "INNERGRID":(2,2,"number","color"), - "LINEBELOW":(2,2,"number","color"), - "LINEABOVE":(2,2,"number","color"), - "LINEBEFORE":(2,2,"number","color"), - "LINEAFTER":(2,2,"number","color"), - # You should NOT have span commands, man! 
- #"SPAN":(,,), - } - -def validateCommands(commands): - '''Given a list of reportlab's table commands, it fixes some common errors - and/or removes commands that can't be fixed''' - - fixed=[] - - for command in commands: - command[0]=command[0].upper() - flag=False - # See if the command is valid - if command[0] not in validCommands: - log.error('Unknown table command %s in stylesheet',command[0]) - continue - - # See if start and stop are the right types - if type(command[1]) not in (ListType,TupleType): - log.error('Start cell in table command should be list or tuple, got %s [%s]',type(command[1]),command[1]) - flag=True - - if type(command[2]) not in (ListType,TupleType): - log.error('Stop cell in table command should be list or tuple, got %s [%s]',type(command[1]),command[1]) - flag=True - - # See if the number of arguments is right - l=len(command)-3 - if l>validCommands[command[0]][1]: - log.error('Too many arguments in table command: %s',command) - flag=True - - if l - -.. class:: title - -#{title} - - - - - -.. class:: subtitle - -#{subtitle} - - diff --git a/dist-packages/rst2pdf/rst2pdf/templates/sphinxcover.tmpl b/dist-packages/rst2pdf/rst2pdf/templates/sphinxcover.tmpl deleted file mode 100644 index 459cd9050..000000000 --- a/dist-packages/rst2pdf/rst2pdf/templates/sphinxcover.tmpl +++ /dev/null @@ -1,40 +0,0 @@ -.. role:: normal - - - -.. cssclass:: title - -#{title} - - - - - -.. cssclass:: subtitle - -#{subtitle} - - - -.. raw:: pdf - - Spacer 0 3cm - - -.. cssclass:: author - -#{author} - - - -.. raw:: pdf - - Spacer 0 3cm - -.. cssclass:: author - -:normal:`#{date}` - -.. raw:: pdf - - OddPageBreak diff --git a/dist-packages/rst2pdf/rst2pdf/tenjin.py b/dist-packages/rst2pdf/rst2pdf/tenjin.py deleted file mode 100644 index c90b09c6a..000000000 --- a/dist-packages/rst2pdf/rst2pdf/tenjin.py +++ /dev/null @@ -1,1078 +0,0 @@ -# -*- coding: utf-8 -*- -## -## $Rev: 137 $ -## $Release: 0.6.2 $ -## copyright(c) 2007-2008 kuwata-lab.com all rights reserved. -## -## Permission is hereby granted, free of charge, to any person obtaining -## a copy of this software and associated documentation files (the -## "Software"), to deal in the Software without restriction, including -## without limitation the rights to use, copy, modify, merge, publish, -## distribute, sublicense, and/or sell copies of the Software, and to -## permit persons to whom the Software is furnished to do so, subject to -## the following conditions: -## -## The above copyright notice and this permission notice shall be -## included in all copies or substantial portions of the Software. -## -## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -## LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -## OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -## WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -## - -"""Very fast and light-weight template engine based embedded Python. - - pyTenjin is similar to PHP or eRuby (embedded Ruby). - * '' represents python statement. - * '#{...}' represents python expression. - * '${...}' represents python expression with escaping. - - And it provides the following features. 
- * Layout template and nested template - * Including other template files - * Template caching - * Capturing - - See help of tenjin.Template and tenjin.Engine for details. -""" - -__revision__ = "$Rev: 137 $"[6:-2] -__release__ = "0.6.2" -__license__ = "MIT License" -__all__ = ['Template', 'Engine', 'helpers', 'html', ] - - -import re, sys, os, time, marshal - - -## -## utilities -## - -try: - import fcntl - def _lock_file(file, content): - fcntl.flock(file.fileno(), fcntl.LOCK_EX) -except ImportError, ex: - try: - import msvcrt - def _lock_file(file, content): - msvcrt.locking(file.fileno(), msvcrt.LK_LOCK, len(content)) - except ImportError, ex: - def _lock_file(file, content): - pass - -def _write_file_with_lock(filename, content): - f = None - try: - f = open(filename, 'wb') - _lock_file(f, content) - f.write(content) - finally: - if f: - f.close() - -def _create_module(module_name): - """ex. mod = _create_module('tenjin.util')""" - import new - mod = new.module(module_name.split('.')[-1]) - sys.modules[module_name] = mod - return mod - - - -## -## helper method's module -## - -def _create_helpers_module(): - - def to_str(val): - """Convert value into string. Return '' if val is None. - ex. - >>> to_str(None) - '' - >>> to_str("foo") - 'foo' - >>> to_str(u"\u65e5\u672c\u8a9e") - u'\u65e5\u672c\u8a9e' - >>> to_str(123) - '123' - """ - if val is None: return '' - if isinstance(val, str): return val - if isinstance(val, unicode): return val - return str(val) - - def generate_tostrfunc(encoding): - """Generate 'to_str' function which encodes unicode to str. - ex. - import tenjin - from tenjin.helpers import escape - to_str = tenjin.generate_tostrfunc('utf-8') - engine = tenjin.Engine() - context = { 'items': [u'AAA', u'BBB', u'CCC'] } - output = engine.render('example.pyhtml') - print output - """ - def to_str(val): - if val is None: return '' - if isinstance(val, str): return val - if isinstance(val, unicode): return val.encode(encoding) - return str(val) - return to_str - - def echo(string): - """add string value into _buf. this is equivarent to '#{string}'.""" - frame = sys._getframe(1) - context = frame.f_locals - context['_buf'].append(string) - - def start_capture(varname=None): - """ - start capturing with name. - - ex. list.rbhtml - - -
    - -
<li>${item}</li>
- -
- - - - ex. layout.rbhtml - - - Capture Example - - - - #{itemlist} - - - - """ - frame = sys._getframe(1) - context = frame.f_locals - context['_buf_tmp'] = context['_buf'] - context['_capture_varname'] = varname - context['_buf'] = [] - - def stop_capture(store_to_context=True): - """ - stop capturing and return the result of capturing. - if store_to_context is True then the result is stored into _context[varname]. - """ - frame = sys._getframe(1) - context = frame.f_locals - result = ''.join(context['_buf']) - context['_buf'] = context.pop('_buf_tmp') - varname = context.pop('_capture_varname') - if varname: - context[varname] = result - if store_to_context: - context['_context'][varname] = result - return result - - def captured_as(name): - """ - helper method for layout template. - if captured string is found then append it to _buf and return True, - else return False. - """ - frame = sys._getframe(1) - context = frame.f_locals - if context.has_key(name): - _buf = context['_buf'] - _buf.append(context[name]) - return True - return False - - def _p(arg): - """ex. '/show/'+_p("item['id']") => "/show/#{item['id']}" """ - return '<`#%s#`>' % arg # decoded into #{...} by preprocessor - - def _P(arg): - """ex. '%s' % _P("item['id']") => "${item['id']}" """ - return '<`$%s$`>' % arg # decoded into ${...} by preprocessor - - def _decode_params(s): - """decode <`#...#`> and <`$...$`> into #{...} and ${...}""" - from urllib import unquote - dct = { 'lt':'<', 'gt':'>', 'amp':'&', 'quot':'"', '#039':"'", } - def unescape(s): - #return s.replace('<', '<').replace('>', '>').replace('"', '"').replace(''', "'").replace('&', '&') - return re.sub(r'&(lt|gt|quot|amp|#039);', lambda m: dct[m.group(1)], s) - s = re.sub(r'%3C%60%23(.*?)%23%60%3E', lambda m: '#{%s}' % unquote(m.group(1)), s) - s = re.sub(r'%3C%60%24(.*?)%24%60%3E', lambda m: '${%s}' % unquote(m.group(1)), s) - s = re.sub(r'<`#(.*?)#`>', lambda m: '#{%s}' % unescape(m.group(1)), s) - s = re.sub(r'<`\$(.*?)\$`>', lambda m: '${%s}' % unescape(m.group(1)), s) - s = re.sub(r'<`#(.*?)#`>', r'#{\1}', s) - s = re.sub(r'<`\$(.*?)\$`>', r'${\1}', s) - return s - - mod = _create_module('tenjin.helpers') - mod.to_str = to_str - mod.generate_tostrfunc = generate_tostrfunc - mod.echo = echo - mod.start_capture = start_capture - mod.stop_capture = stop_capture - mod.captured_as = captured_as - mod._p = _p - mod._P = _P - mod._decode_params = _decode_params - mod.__all__ = ['escape', 'to_str', 'echo', 'generate_tostrfunc', - 'start_capture', 'stop_capture', 'captured_as', - '_p', '_P', '_decode_params', - ] - return mod - -helpers = _create_helpers_module() -del _create_helpers_module -generate_tostrfunc = helpers.generate_tostrfunc - - - -## -## module for html -## - -def _create_html_module(): - - to_str = helpers.to_str - _escape_table = { '&': '&', '<': '<', '>': '>', '"': '"' } - _escape_pattern = re.compile(r'[&<>"]') - _escape_callable = lambda m: _escape_table[m.group(0)] - - def escape_xml(s): - """Escape '&', '<', '>', '"' into '&', '<', '>', '"'. - """ - return _escape_pattern.sub(_escape_callable, s) - #return s.replace('&','&').replace('<','<').replace('>','>').replace('"','"') - - def tagattr(name, expr, value=None, escape=True): - """return empty string when expr is false value, ' name="value"' when - value is specified, or ' name="expr"' when value is None. - ex. 
- >>> tagattr('size', 20) - ' size="20"' - >>> tagattr('size', 0) - '' - >>> tagattr('checked', True, 'checked') - ' checked="checked"' - >>> tagattr('checked', False, 'checked') - '' - """ - if not expr: - return '' - if value is None: - value = to_str(expr) - else: - value = to_str(value) - if escape: - value = escape_xml(value) - return ' %s="%s"' % (name, value) - - def checked(expr): - """return ' checked="checked"' if expr is true.""" - return expr and ' checked="checked"' or '' - #return attr('checked', expr, 'checked') - - def selected(expr): - """return ' selected="selected"' if expr is true.""" - return expr and ' selected="selected"' or '' - #return attr('selected', expr, 'selected') - - def disabled(expr): - """return ' disabled="disabled"' if expr is true.""" - return expr and ' disabled="disabled"' or '' - #return attr('disabled, expr, 'disabled') - - def nl2br(text): - """replace "\n" to "
<br />\n" and return it.""" - if not text: - return '' - return text.replace('\n', '<br />
\n") - - def text2html(text): - """(experimental) escape xml characters, replace "\n" to "<br />
\n", and return it.""" - if not text: - return '' - return nl2br(escape_xml(text).replace(' ', '  ')) - - mod = _create_module('tenjin.helpers.html') - mod._escape_table = _escape_table - mod.escape_xml = escape_xml - mod.escape = escape_xml - mod.tagattr = tagattr - mod.checked = checked - mod.selected = selected - mod.disabled = disabled - mod.nl2br = nl2br - mod.text2html = text2html - return mod - -helpers.html = _create_html_module() -del _create_html_module - -helpers.escape = helpers.html.escape_xml - - - -## -## Template class -## - -class Template(object): - """Convert and evaluate embedded python string. - - Notation: - * '' means python statement code. - * '#{...}' means python expression code. - * '${...}' means python escaped expression code. - - ex. example.pyhtml - - - - - - - - - -
${item}
- - ex. - >>> filename = 'example.pyhtml' - >>> import tenjin - >>> from tenjin.helpers import escape, to_str - >>> template = tenjin.Template(filename) - >>> script = template.script - >>> ## or - >>> #template = tenjin.Template() - >>> #script = template.convert_file(filename) - >>> ## or - >>> #template = tenjin.Template() - >>> #input = open(filename).read() - >>> #script = template.convert(input, filename) # filename is optional - >>> print script - >>> context = {'items': ['','bar&bar','"baz"']} - >>> output = template.render(context) - >>> print output - - - - - - - - - - -
&lt;foo&gt;
bar&amp;bar
&quot;baz&quot;
- """ - - ## default value of attributes - filename = None - encoding = None - escapefunc = 'escape' - tostrfunc = 'to_str' - indent = 4 - preamble = None - postamble = None # "_buf = []" - smarttrim = None # "print ''.join(_buf)" - args = None - - def __init__(self, filename=None, encoding=None, escapefunc=None, tostrfunc=None, indent=None, preamble=None, postamble=None, smarttrim=None): - """Initailizer of Template class. - - filename:str (=None) - Filename to convert (optional). If None, no convert. - encoding:str (=None) - Encoding name. If specified, template string is converted into - unicode object internally. - Template.render() returns str object if encoding is None, - else returns unicode object if encoding name is specified. - escapefunc:str (='escape') - Escape function name. - tostrfunc:str (='to_str') - 'to_str' function name. - indent:int (=4) - Indent width. - preamble:str or bool (=None) - Preamble string which is inserted into python code. - If true, '_buf = []' is used insated. - postamble:str or bool (=None) - Postamble string which is appended to python code. - If true, 'print "".join(_buf)' is used instead. - smarttrim:bool (=None) - If True then "
\\n#{_context}\\n
" is parsed as - "
\\n#{_context}
". - """ - if encoding is not None: self.encoding = encoding - if escapefunc is not None: self.escapefunc = escapefunc - if tostrfunc is not None: self.tostrfunc = tostrfunc - if indent is not None: self.indent = indent - if preamble is not None: self.preamble = preamble - if postamble is not None: self.postamble = postamble - if smarttrim is not None: self.smarttrim = smarttrim - # - if preamble is True: self.preamble = "_buf = []" - if postamble is True: self.postamble = "print ''.join(_buf)" - if filename: - self.convert_file(filename) - else: - self._reset() - - def _reset(self, input=None, filename=None): - self._spaces = '' - self.script = None - self.bytecode = None - self.input = input - self.filename = filename - if input != None: - i = input.find("\n") - if i < 0: - self.newline = "\n" # or None - elif len(input) >= 2 and input[i-1] == "\r": - self.newline = "\r\n" - else: - self.newline = "\n" - - def before_convert(self, buf): - #buf.append('_buf = []; ') - if self.preamble: - buf.append(self.preamble) - buf.append(self.input.startswith('>> import tenjin - >>> from tenjin.helpers import escape, to_str - >>> template = tenjin.Template() - >>> filename = 'example.html' - >>> input = open(filename).read() - >>> script = template.convert(input, filename) # filename is optional - >>> print script - """ - if self.encoding and isinstance(input, str): - input = input.decode(self.encoding) - self._reset(input, filename) - buf = [] - self.before_convert(buf) - self.parse_stmts(buf, input) - self.after_convert(buf) - script = ''.join(buf) - self.script = script - return script - - def compile_stmt_pattern(pi): - return re.compile(r'<\?%s( |\t|\r?\n)(.*?) ?\?>([ \t]*\r?\n)?' % pi, re.S) - - STMT_PATTERN = compile_stmt_pattern('py') - - compile_stmt_pattern = staticmethod(compile_stmt_pattern) - - def stmt_pattern(self): - return Template.STMT_PATTERN - - def parse_stmts(self, buf, input): - if not input: - return - rexp = self.stmt_pattern() - is_bol = True - index = 0 - for m in rexp.finditer(input): - mspace, code, rspace = m.groups() - #mspace, close, rspace = m.groups() - #code = input[m.start()+4+len(mspace):m.end()-len(close)-(rspace and len(rspace) or 0)] - text = input[index:m.start()] - index = m.end() - ## detect spaces at beginning of line - lspace = None - if text == '': - if is_bol: - lspace = '' - elif text[-1] == '\n': - lspace = '' - else: - rindex = text.rfind('\n') - if rindex < 0: - if is_bol and text.isspace(): - lspace = text - text = '' - else: - s = text[rindex+1:] - if s.isspace(): - lspace = s - text = text[:rindex+1] - #is_bol = rspace is not None - ## add text, spaces, and statement - self.parse_exprs(buf, text, is_bol) - is_bol = rspace is not None - if lspace: - buf.append(lspace) - if mspace != " ": - #buf.append(mspace) - buf.append(mspace == "\t" and "\t" or "\n") # don't append "\r\n"! - if code: - code = self.statement_hook(code) - self.add_stmt(buf, code) - self._set_spaces(code, lspace, mspace) - if rspace: - #buf.append(rspace) - buf.append("\n") # don't append "\r\n"! 
- rest = input[index:] - if rest: - self.parse_exprs(buf, rest) - - def statement_hook(self, stmt): - """expand macros and parse '#@ARGS' in a statement.""" - ## macro expantion - #macro_pattern = r'^(\s*)(\w+)\((.*?)\);?\s*$'; - #m = re.match(macro_pattern, stmt) - #if m: - # lspace, name, arg = m.group(1), m.group(2), m.group(3) - # handler = self.get_macro_handler(name) - # return handler is None and stmt or lspace + handler(arg) - ## arguments declaration - if self.args is None: - args_pattern = r'^ *#@ARGS(?:[ \t]+(.*?))?$' - m = re.match(args_pattern, stmt) - if m: - arr = (m.group(1) or '').split(',') - args = []; declares = [] - for s in arr: - arg = s.strip() - if not s: continue - if not re.match('^[a-zA-Z_]\w*$', arg): - raise ValueError("%s: invalid template argument." % arg) - args.append(arg) - declares.append("%s = _context.get('%s'); " % (arg, arg)) - self.args = args - return ''.join(declares) - ## - return stmt - - #MACRO_HANDLER_TABLE = { - # "echo": - # lambda arg: "_buf.append(%s); " % arg, - # "include": - # lambda arg: "_buf.append(_context['_engine'].render(%s, _context, layout=False)); " % arg, - # "start_capture": - # lambda arg: "_buf_bkup = _buf; _buf = []; _capture_varname = %s; " % arg, - # "stop_capture": - # lambda arg: "_context[_capture_varname] = ''.join(_buf); _buf = _buf_bkup; ", - # "start_placeholder": - # lambda arg: "if (_context[%s]) _buf.push(_context[%s]); else:" % (arg, arg), - # "stop_placeholder": - # lambda arg: "#endif", - #} - # - #def get_macro_handler(name): - # return MACRO_HANDLER_TABLE.get(name) - - EXPR_PATTERN = re.compile(r'([#$])\{(.*?)\}', re.S) - - def expr_pattern(self): - return Template.EXPR_PATTERN - - def get_expr_and_escapeflag(self, match): - return match.group(2), match.group(1) == '$' - - def parse_exprs(self, buf, input, is_bol=False): - if not input: - return - if self._spaces: - buf.append(self._spaces) - self.start_text_part(buf) - rexp = self.expr_pattern() - smarttrim = self.smarttrim - nl = self.newline - nl_len = len(nl) - pos = 0 - for m in rexp.finditer(input): - start = m.start() - text = input[pos:start] - pos = m.end() - expr, flag_escape = self.get_expr_and_escapeflag(m) - # - if text: - self.add_text(buf, text) - #if text[-1] == "\n": - # buf.append("\n") - # if self._spaces: - # buf.append(self._spaces) - self.add_expr(buf, expr, flag_escape) - # - if smarttrim: - flag_bol = text.endswith(nl) or not text and (start > 0 or is_bol) - if flag_bol and not flag_escape and input[pos:pos+nl_len] == nl: - pos += nl_len - buf.append("\n") - if smarttrim: - if buf and buf[-1] == "\n": - buf.pop() - rest = input[pos:] - if rest: - self.add_text(buf, rest, True) - self.stop_text_part(buf) - if input[-1] == '\n': - buf.append("\n") - - def start_text_part(self, buf): - buf.append("_buf.extend((") - - def stop_text_part(self, buf): - buf.append("));") - - _quote_rexp = re.compile(r"(['\\\\])") - - def add_text(self, buf, text, encode_newline=False): - if not text: - return; - if self.encoding: - buf.append("u'''") - else: - buf.append("'''") - #text = re.sub(r"(['\\\\])", r"\\\1", text) - text = Template._quote_rexp.sub(r"\\\1", text) - if not encode_newline or text[-1] != "\n": - buf.append(text) - buf.append("''', ") - elif len(text) >= 2 and text[-2] == "\r": - buf.append(text[0:-2]) - buf.append("\\r\\n''', ") - else: - buf.append(text[0:-1]) - buf.append("\\n''', ") - - _add_text = add_text - - def add_expr(self, buf, code, flag_escape=None): - if not code or code.isspace(): - return - if flag_escape is None: - 
buf.append(code); buf.append(", "); - elif flag_escape is False: - buf.extend((self.tostrfunc, "(", code, "), ")) - else: - buf.extend((self.escapefunc, "(", self.tostrfunc, "(", code, ")), ")) - - def add_stmt(self, buf, code): - if self.newline == "\r\n": - code = code.replace("\r\n", "\n") - buf.append(code) - #if code[-1] != '\n': - # buf.append(self.newline) - - def _set_spaces(self, code, lspace, mspace): - if lspace: - if mspace == " ": - code = lspace + code - elif mspace == "\t": - code = lspace + "\t" + code - #i = code.rstrip().rfind("\n") - #if i < 0: # i == -1 - # i = 0 - #else: - # i += 1 - i = code.rstrip().rfind("\n") + 1 - indent = 0 - n = len(code) - ch = None - while i < n: - ch = code[i] - if ch == " ": indent += 1 - elif ch == "\t": indent += 8 - else: break - i += 1 - if ch: - if code.rstrip()[-1] == ':': - indent += self.indent - self._spaces = ' ' * indent - - def render(self, context=None, globals=None, _buf=None): - """Evaluate python code with context dictionary. - If _buf is None then return the result of evaluation as str, - else return None. - - context:dict (=None) - Context object to evaluate. If None then new dict is created. - globals:dict (=None) - Global object. If None then globals() is used. - _buf:list (=None) - If None then new list is created. - - ex. - >>> import tenjin - >>> from tenjin.helpers import escape, to_str - >>> template = tenjin.Template('example.pyhtml') - >>> context = {'items': ['foo','bar','baz'], 'title': 'example'} - >>> output = template.evaluate(context) - >>> print output, - """ - if context is None: - locals = context = {} - elif self.args is None: - locals = context.copy() - else: - locals = {} - if context.has_key('_engine'): - context.get('_engine').hook_context(locals) - locals['_context'] = context - if globals is None: - globals = sys._getframe(1).f_globals - bufarg = _buf - if _buf is None: - _buf = [] - locals['_buf'] = _buf - if not self.bytecode: - self.compile() - exec self.bytecode in globals, locals - if bufarg is None: - s = ''.join(_buf) - #if self.encoding: - # s = s.encode(self.encoding) - return s - else: - return None - - def compile(self): - """compile self.script into self.bytecode""" - self.bytecode = compile(self.script, self.filename or '(tenjin)', 'exec') - - -## -## preprocessor class -## - -class Preprocessor(Template): - - STMT_PATTERN = Template.compile_stmt_pattern('PY') - - def stmt_pattern(self): - return Preprocessor.STMT_PATTERN - - EXPR_PATTERN = re.compile(r'([#$])\{\{(.*?)\}\}', re.S) - - def expr_pattern(self): - return Preprocessor.EXPR_PATTERN - - #def get_expr_and_escapeflag(self, match): - # return match.group(2), match.group(1) == '$' - - def add_expr(self, buf, code, flag_escape=None): - if not code or code.isspace(): - return - code = "_decode_params(%s)" % code - Template.add_expr(self, buf, code, flag_escape) - - -## -## template engine class -## - -class Engine(object): - """Engine class of templates. - - ex. - >>> ## create engine - >>> import tenjin - >>> from tenjin.helpers import * - >>> prefix = 'user_' - >>> postfix = '.pyhtml' - >>> layout = 'layout.pyhtml' - >>> path = ['views'] - >>> engine = tenjin.Engine(prefix=prefix, postfix=postfix, - ... layout=layout, path=path, encoding='utf-8') - >>> ## evaluate template(='views/user_create.pyhtml') with context object. - >>> ## (layout template (='views/layout.pyhtml') are used.) 
- >>> context = {'title': 'Create User', 'user': user} - >>> print engine.render(':create', context) - >>> ## evaluate template without layout template. - >>> print engine.render(':create', context, layout=False) - - In template file, the followings are available. - * include(template_name, append_to_buf=True) : - Include other template - * _content : - Result of evaluating template (available only in layout file). - - ex. file 'layout.pyhtml': - - -
- -
-
- #{_content} -
- - - """ - - ## default value of attributes - prefix = '' - postfix = '' - layout = None - templateclass = Template - path = None - cache = False - preprocess = False - - def __init__(self, prefix=None, postfix=None, layout=None, path=None, cache=None, preprocess=None, templateclass=None, **kwargs): - """Initializer of Engine class. - - prefix:str (='') - Prefix string used to convert template short name to template filename. - postfix:str (='') - Postfix string used to convert template short name to template filename. - layout:str (=None) - Default layout template name. - path:list of str(=None) - List of directory names which contain template files. - cache:bool (=True) - Cache converted python code into file. - preprocess:bool(=False) - Activate preprocessing or not. - templateclass:class (=Template) - Template class which engine creates automatically. - kwargs:dict - Options for Template class constructor. - See document of Template.__init__() for details. - """ - if prefix: self.prefix = prefix - if postfix: self.postfix = postfix - if layout: self.layout = layout - if templateclass: self.templateclass = templateclass - if path is not None: self.path = path - if cache is not None: self.cache = cache - if preprocess is not None: self.preprocess = preprocess - self.kwargs = kwargs - self.templates = {} # template_name => Template object - - def to_filename(self, template_name): - """Convert template short name to filename. - ex. - >>> engine = tenjin.Engine(prefix='user_', postfix='.pyhtml') - >>> engine.to_filename('list') - 'list' - >>> engine.to_filename(':list') - 'user_list.pyhtml' - """ - if template_name[0] == ':' : - return self.prefix + template_name[1:] + self.postfix - return template_name - - def find_template_file(self, template_name): - """Find template file and return it's filename. - When template file is not found, IOError is raised. - """ - filename = self.to_filename(template_name) - if self.path: - for dirname in self.path: - filepath = dirname + os.path.sep + filename - if os.path.isfile(filepath): - return filepath - else: - if os.path.isfile(filename): - return filename - raise IOError('%s: filename not found (path=%s).' 
% (filename, repr(self.path))) - - def register_template(self, template_name, template): - """Register an template object.""" - if not hasattr(template, 'timestamp'): - template.timestamp = None # or time.time() - self.templates[template_name] = template - - def load_cachefile(self, cache_filename, template): - """load marshaled cache file""" - #template.bytecode = marshal.load(open(cache_filename, 'rb')) - dct = marshal.load(open(cache_filename, 'rb')) - template.args = dct['args'] - template.script = dct['script'] - template.bytecode = dct['bytecode'] - - def _load_cachefile_for_script(self, cache_filename, template): - s = open(cache_filename).read() - if s.startswith('#@ARGS '): - pos = s.find("\n") - args_str = s[len('#@ARGS '):pos] - template.args = args_str and args_str.split(', ') or [] - s = s[pos+1:] - else: - template.args = None - if template.encoding: - #s = s.decode(template.encoding) - s = s.decode('utf-8') - template.script = s - template.compile() - - def store_cachefile(self, cache_filename, template): - """store template into marshal file""" - dct = { 'args': template.args, - 'script': template.script, - 'bytecode': template.bytecode } - _write_file_with_lock(cache_filename, marshal.dumps(dct)) - - def _store_cachefile_for_script(self, cache_filename, template): - s = template.script - if template.encoding and isinstance(s, unicode): - s = s.encode(template.encoding) - #s = s.encode('utf-8') - if template.args is not None: - s = "#@ARGS %s\n%s" % (', '.join(template.args), s) - _write_file_with_lock(cache_filename, s) - - def cachename(self, filename): - return os.path.join(os.path.expanduser('~'),'.rst2pdf', os.path.basename(filename) + '.cache') - - def create_template(self, filename, _context, _globals): - """Read template file and create template object.""" - template = self.templateclass(None, **self.kwargs) - template.timestamp = time.time() - cache_filename = self.cachename(filename) - getmtime = os.path.getmtime - if not self.cache: - input = self.read_template_file(filename, _context, _globals) - template.convert(input, filename) - #template.compile() - elif os.path.exists(cache_filename) and getmtime(cache_filename) >= getmtime(filename): - #Tenjin.logger.info("** debug: %s: cache found." % filename) - template.filename = filename - self.load_cachefile(cache_filename, template) - if template.bytecode is None: - template.compile() - else: - #Tenjin.logger.info("** debug: %s: cache not found." % filename) - input = self.read_template_file(filename, _context, _globals) - template.convert(input, filename) - template.compile() - self.store_cachefile(cache_filename, template) - return template - - def read_template_file(self, filename, _context, _globals): - if not self.preprocess: - return open(filename).read() - if _context is None: - _context = {} - if not _context.has_key('_engine'): - self.hook_context(_context) - if _globals is None: - _globals = sys._getframe(2).f_globals - preprocessor = Preprocessor(filename) - return preprocessor.render(_context, globals=_globals) - - def get_template(self, template_name, _context=None, _globals=None): - """Return template object. - If template object has not registered, template engine creates - and registers template object automatically. 
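For reference, the template-name and cache-path conventions documented above are simple enough to restate in a few standalone lines. This is only an illustrative sketch: the helper names are invented here, while the prefix/postfix defaults and the ~/.rst2pdf cache directory are taken from the to_filename() doctest and cachename() shown in the removed code.

    import os

    def to_filename(name, prefix='user_', postfix='.pyhtml'):
        # A short name such as ':list' expands to prefix + 'list' + postfix;
        # anything else is treated as a literal filename.
        if name.startswith(':'):
            return prefix + name[1:] + postfix
        return name

    def cache_path(template_path):
        # The removed Engine keeps marshaled template caches under ~/.rst2pdf,
        # keyed by the template's base filename.
        return os.path.join(os.path.expanduser('~'), '.rst2pdf',
                            os.path.basename(template_path) + '.cache')

    assert to_filename(':list') == 'user_list.pyhtml'
    assert to_filename('list') == 'list'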
- """ - template = self.templates.get(template_name) - t = template - if t is None or t.timestamp and t.filename and t.timestamp < os.path.getmtime(t.filename): - filename = self.find_template_file(template_name) - # context and globals are passed only for preprocessing - if _globals is None: - _globals = sys._getframe(1).f_globals - template = self.create_template(filename, _context, _globals) - self.register_template(template_name, template) - return template - - def include(self, template_name, append_to_buf=True): - """Evaluate template using current local variables as context. - - template_name:str - Filename (ex. 'user_list.pyhtml') or short name (ex. ':list') of template. - append_to_buf:boolean (=True) - If True then append output into _buf and return None, - else return stirng output. - - ex. - - #{include('file.pyhtml', False)} - - """ - frame = sys._getframe(1) - locals = frame.f_locals - globals = frame.f_globals - assert locals.has_key('_context') - context = locals['_context'] - # context and globals are passed to get_template() only for preprocessing. - template = self.get_template(template_name, context, globals) - if append_to_buf: - _buf = locals['_buf'] - else: - _buf = None - return template.render(context, globals, _buf=_buf) - - def render(self, template_name, context=None, globals=None, layout=True): - """Evaluate template with layout file and return result of evaluation. - - template_name:str - Filename (ex. 'user_list.pyhtml') or short name (ex. ':list') of template. - context:dict (=None) - Context object to evaluate. If None then new dict is used. - globals:dict (=None) - Global context to evaluate. If None then globals() is used. - layout:str or Bool(=True) - If True, the default layout name specified in constructor is used. - If False, no layout template is used. - If str, it is regarded as layout template name. - - If temlate object related with the 'template_name' argument is not exist, - engine generates a template object and register it automatically. - """ - if context is None: - context = {} - if globals is None: - globals = sys._getframe(1).f_globals - self.hook_context(context) - while True: - # context and globals are passed to get_template() only for preprocessing - template = self.get_template(template_name, context, globals) - content = template.render(context, globals) - layout = context.pop('_layout', layout) - if layout is True or layout is None: - layout = self.layout - if not layout: - break - template_name = layout - layout = False - context['_content'] = content - context.pop('_content', None) - return content - - def hook_context(self, context): - context['_engine'] = self - #context['render'] = self.render - context['include'] = self.include diff --git a/dist-packages/rst2pdf/rst2pdf/uniconvsaver.py b/dist-packages/rst2pdf/rst2pdf/uniconvsaver.py deleted file mode 100644 index 45fa5b51a..000000000 --- a/dist-packages/rst2pdf/rst2pdf/uniconvsaver.py +++ /dev/null @@ -1,382 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (C) 2007-2008 by Igor Novikov -# Copyright (C) 2000, 2001, 2002 by Bernhard Herzog -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Library General Public -# License as published by the Free Software Foundation; either -# version 2 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Library General Public License for more details. -# -# You should have received a copy of the GNU Library General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -###Sketch Config -#type = Export -#tk_file_type = ("Portable Document Format (PDF)", '.pdf') -#extensions = '.pdf' -#format_name = 'PDF-Reportlab' -#unload = 1 -###End - -from math import atan2, pi -import PIL -from app import _,Bezier, EmptyPattern, Rotation, Translation, _sketch -from app.Graphics.curveop import arrow_trafos -import reportlab.pdfgen.canvas -import app - - -def make_pdf_path(pdfpath, paths): - for path in paths: - for i in range(path.len): - type, control, p, cont = path.Segment(i) - if type == Bezier: - p1, p2 = control - pdfpath.curveTo(p1.x, p1.y, p2.x, p2.y, p.x, p.y) - else: - if i > 0: - pdfpath.lineTo(p.x, p.y) - else: - pdfpath.moveTo(p.x, p.y) - if path.closed: - pdfpath.close() - return pdfpath - -class PDFDevice: - - has_axial_gradient = 0 - has_radial_gradient = 0 - has_conical_gradient = 0 - gradient_steps = 100 - - def __init__(self, pdf): - self.pdf = pdf - - def PushTrafo(self): - self.pdf.saveState() - - def Concat(self, trafo): - apply(self.pdf.transform, trafo.coeff()) - - def Translate(self, x, y = None): - if y is None: - x, y = x - self.pdf.translate(x, y) - - def Rotate(self, angle): - self.pdf.rotate(angle) - - def Scale(self, scale): - self.pdf.scale(scale, scale) - - def PopTrafo(self): - self.pdf.restoreState() - - PushClip = PushTrafo - PopClip = PopTrafo - - def SetFillColor(self, color): - self.pdf.setFillColor(tuple(color)) - - def SetLineColor(self, color): - self.pdf.setStrokeColor(tuple(color)) - - def SetLineAttributes(self, width, cap = 1, join = 0, dashes = ()): - self.pdf.setLineWidth(width) - self.pdf.setLineCap(cap - 1) - self.pdf.setLineJoin(join) - if dashes: - dashes = list(dashes) - w = width - if w < 1.0: - w = 1.0 - for i in range(len(dashes)): - dashes[i] = w * dashes[i] - self.pdf.setDash(dashes) - - def DrawLine(self, start, end): - self.pdf.line(start.x, start.y, end.x, end.y) - - def DrawLineXY(self, x1, y1, x2, y2): - self.pdf.line(x1, y1, x2, y2) - - def DrawRectangle(self, start, end): - self.pdf.rectangle(start.x, start.y, end.x - start.x, end.y - start.y, - 1, 0) - - def FillRectangle(self, left, bottom, right, top): - self.pdf.rect(left, bottom, right - left, top - bottom, 0, 1) - - def DrawCircle(self, center, radius): - self.pdf.circle(center.x, center.y, radius, 1, 0) - - def FillCircle(self, center, radius): - self.pdf.circle(center.x, center.y, radius, 0, 1) - - def FillPolygon(self, pts): - path = self.pdf.beginPath() - apply(path.moveTo, pts[0]) - for x, y in pts: - path.lineTo(x, y) - path.close() - self.pdf.drawPath(path, 0, 1) - - def DrawBezierPath(self, path, rect = None): - self.pdf.drawPath(make_pdf_path(self.pdf.beginPath(), (path,)), 1, 0) - - def FillBezierPath(self, path, rect = None): - self.pdf.drawPath(make_pdf_path(self.pdf.beginPath(), (path,)), 0, 1) - - - -class PDFGenSaver: - - def __init__(self, file, filename, document, options): - self.file = file - self.filename = filename - self.document = document - self.options = options - - # if there's a pdfgen_canvas option assume it's an instance of - # 
reportlab.pdfgen.canvas.Canvas that we should render on. This - # allows multiple documents to be rendered into the same PDF - # file or to have other python code outside of Sketch such as - # reportlab itself (more precisely one of its other components - # besides pdfgen) render into to too. - # - # The code here assumes that the canvas is already setup - # properly. - if options.has_key("pdfgen_canvas"): - self.pdf = options["pdfgen_canvas"] - else: - self.pdf = reportlab.pdfgen.canvas.Canvas(file) - self.pdf.setPageSize(document.PageSize()) - - def close(self): - if not self.options.has_key("pdfgen_canvas"): - self.pdf.save() - - def set_properties(self, properties, bounding_rect = None): - pattern = properties.line_pattern - if not pattern.is_Empty: - if pattern.is_Solid: - c, m, y, k =pattern.Color().getCMYK() - self.pdf.setStrokeColorCMYK(c, m, y, k) - self.pdf.setLineWidth(properties.line_width) - self.pdf.setLineJoin(properties.line_join) - self.pdf.setLineCap(properties.line_cap - 1) - dashes = properties.line_dashes - if dashes: - dashes = list(dashes) - w = properties.line_width - if w < 1.0: - w = 1.0 - for i in range(len(dashes)): - dashes[i] = w * dashes[i] - self.pdf.setDash(dashes) - active_fill = None - pattern = properties.fill_pattern - if not pattern.is_Empty: - if pattern.is_Solid: - c, m, y, k =pattern.Color().getCMYK() - self.pdf.setFillColorCMYK(c, m, y, k) - elif pattern.is_Tiled: - pass - elif pattern.is_AxialGradient: - active_fill = self.axial_gradient - else: - active_fill = self.execute_pattern - return active_fill - - def axial_gradient(self, properties, rect): - pattern = properties.fill_pattern - vx, vy = pattern.Direction() - angle = atan2(vy, vx) - pi / 2 - center = rect.center() - rot = Rotation(angle, center) - left, bottom, right, top = rot(rect) - trafo = rot(Translation(center)) - image = PIL.Image.new('RGB', (1, 200)) - border = int(round(100 * pattern.Border())) - _sketch.fill_axial_gradient(image.im, pattern.Gradient().Colors(), - 0, border, 0, 200 - border) - self.pdf.saveState() - apply(self.pdf.transform, trafo.coeff()) - self.pdf.drawInlineImage(image, (left - right) / 2, (bottom - top) / 2, - right - left, top - bottom) - self.pdf.restoreState() - - def execute_pattern(self, properties, rect): - device = PDFDevice(self.pdf) - properties.fill_pattern.Execute(device, rect) - - def make_pdf_path(self, paths): - return make_pdf_path(self.pdf.beginPath(), paths) - - def polybezier(self, paths, properties, bounding_rect, clip = 0): - pdfpath = self.make_pdf_path(paths) - active_fill = self.set_properties(properties, bounding_rect) - if active_fill: - if not clip: - self.pdf.saveState() - self.pdf.clipPath(pdfpath, 0, 0) - active_fill(properties, bounding_rect) - if not clip: - self.pdf.restoreState() - if properties.HasLine(): - self.pdf.drawPath(pdfpath, 1, 0) - else: - if clip: - method = self.pdf.clipPath - else: - method = self.pdf.drawPath - method(self.make_pdf_path(paths), properties.HasLine(), - properties.HasFill()) - # draw the arrows - if properties.HasLine(): - # Set the pdf fill color to the line color to make sure that - # arrows that are filled are filled with the line color of - # the object. Since lines are always drawn last, this - # shouldn't interfere with the object's fill. 
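The saver above is essentially a translator from Sketch path objects to ReportLab's pdfgen canvas: make_pdf_path() emits moveTo/lineTo/curveTo calls segment by segment, and drawPath() then strokes or fills the result. A minimal, self-contained sketch of those ReportLab calls follows; the output filename and coordinates are made up for illustration only.

    from reportlab.pdfgen import canvas

    c = canvas.Canvas("path-demo.pdf", pagesize=(595, 842))  # roughly A4 in points
    c.setLineWidth(1.5)
    c.setStrokeColorCMYK(0, 0, 0, 1)

    p = c.beginPath()
    p.moveTo(100, 100)                        # start of the subpath
    p.lineTo(200, 100)                        # straight segment
    p.curveTo(250, 100, 300, 150, 300, 200)   # cubic bezier: two control points, then end point
    p.close()

    c.drawPath(p, stroke=1, fill=0)           # stroke only, as the saver does for unfilled paths
    c.showPage()
    c.save()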
- c, m, y, k = properties.line_pattern.Color().getCMYK() - self.pdf.setFillColorCMYK(c, m, y, k) - arrow1 = properties.line_arrow1 - arrow2 = properties.line_arrow2 - if arrow1 or arrow2: - for path in paths: - t1, t2 = arrow_trafos(path, properties) - if arrow1 and t1 is not None: - self.draw_arrow(arrow1, t1) - if arrow2 and t2 is not None: - self.draw_arrow(arrow2, t2) - - def draw_arrow(self, arrow, trafo): - path = arrow.Paths()[0].Duplicate() - path.Transform(trafo) - pdfpath = self.make_pdf_path((path,)) - if arrow.IsFilled(): - self.pdf.drawPath(pdfpath, 0, 1) - else: - self.pdf.drawPath(pdfpath, 1, 0) - - def mask_group(self, object): - mask = object.Mask() - if not mask.has_properties: - # XXX implement this case (raster images) - return - if mask.is_curve: - self.pdf.saveState() - prop = mask.Properties().Duplicate() - prop.SetProperty(line_pattern = EmptyPattern) - self.polybezier(mask.Paths(), prop, mask.bounding_rect, clip = 1) - self.save_objects(object.MaskedObjects()) - if mask.has_line and mask.Properties().HasLine(): - prop = mask.Properties().Duplicate() - prop.SetProperty(fill_pattern = EmptyPattern) - self.polybezier(mask.Paths(), prop, mask.bounding_rect, - clip = 1) - self.pdf.restoreState() - - def raster_image(self, object): - self.pdf.saveState() - apply(self.pdf.transform, object.Trafo().coeff()) - self.pdf.drawInlineImage(object.Data().Image(), 0, 0) - self.pdf.restoreState() - - def simple_text(self, object, clip = 0): - properties = object.Properties() - active_fill = self.set_properties(properties, object.bounding_rect) - fontname = properties.font.PostScriptName() - if fontname not in self.pdf.getAvailableFonts(): - fontname = 'Times-Roman' - - if active_fill and not clip: - self.pdf.saveState() - pdftext = self.pdf.beginText() - if active_fill: - pdftext.setTextRenderMode(7) - elif clip: - pdftext.setTextRenderMode(4) - pdftext.setFont(fontname, properties.font_size) - apply(pdftext.setTextTransform, object.FullTrafo().coeff()) - pdftext.textOut(object.Text()) - self.pdf.drawText(pdftext) - if active_fill: - active_fill(properties, object.bounding_rect) - if not clip: - self.pdf.restoreState() - - def path_text(self, object, clip = 0): - properties = object.Properties() - active_fill = self.set_properties(properties, object.bounding_rect) - fontname = properties.font.PostScriptName() - if fontname not in self.pdf.getAvailableFonts(): - fontname = 'Times-Roman' - - if active_fill and not clip: - self.pdf.saveState() - pdftext = self.pdf.beginText() - if active_fill: - pdftext.setTextRenderMode(7) - elif clip: - pdftext.setTextRenderMode(4) - pdftext.setFont(fontname, properties.font_size) - trafos = object.CharacterTransformations() - text = object.Text() - for i in range(len(trafos)): - apply(pdftext.setTextTransform, trafos[i].coeff()) - pdftext.textOut(text[i]) - self.pdf.drawText(pdftext) - if active_fill: - active_fill(properties, object.bounding_rect) - if not clip: - self.pdf.restoreState() - - def Save(self): - self.document.updateActivePage() - masters=self.document.getMasterLayers() - count=0 - pagenum=len(self.document.pages) - interval=int(97/pagenum) - for page in self.document.pages: - count+=1 - app.updateInfo(inf2=_('Composing page %u of %u')%(count,pagenum),inf3=count*interval) - layers=page+masters - for layer in layers: - if not layer.is_SpecialLayer and layer.Printable(): - self.save_objects(layer.GetObjects()) - #self.pdf.showPage() - - def save_objects(self, objects): - for object in objects: - if object.is_Compound: - if 
object.is_MaskGroup: - self.mask_group(object) - else: - self.save_objects(object.GetObjects()) - elif object.is_SimpleText: -# self.simple_text(object) - obj=object.AsBezier() - self.polybezier(obj.Paths(), obj.Properties(), obj.bounding_rect) - elif object.is_PathTextText: - self.path_text(object) - elif object.is_Image: - self.raster_image(object) - elif object.is_Bezier or object.is_Rectangle or object.is_Ellipse: - self.polybezier(object.Paths(), object.Properties(), object.bounding_rect) - - - -def save(document, file, filename, options = {}): - app.updateInfo(inf1=_('PDF generation.'),inf2=_('Start document composing'),inf3=3) - saver = PDFGenSaver(file, filename, document, options) - saver.Save() - saver.close() - app.updateInfo(inf2=_('Document generation is finished'),inf3=100) diff --git a/dist-packages/rst2pdf/rst2pdf/utils.py b/dist-packages/rst2pdf/rst2pdf/utils.py deleted file mode 100644 index 671f45664..000000000 --- a/dist-packages/rst2pdf/rst2pdf/utils.py +++ /dev/null @@ -1,417 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms -#$URL$ -#$Date$ -#$Revision$ - -import shlex - -from flowables import * -import rst2pdf.flowables -from styles import adjustUnits -from log import log, nodeid - -def parseRaw(data, node): - """Parse and process a simple DSL to handle creation of flowables. - - Supported (can add others on request): - - * PageBreak - - * Spacer width, height - - """ - elements = [] - lines = data.splitlines() - for line in lines: - lexer = shlex.shlex(line) - lexer.whitespace += ',' - tokens = list(lexer) - if not tokens: - continue # Empty line - command = tokens[0] - if command == 'PageBreak': - if len(tokens) == 1: - elements.append(MyPageBreak()) - else: - elements.append(MyPageBreak(tokens[1])) - elif command == 'EvenPageBreak': - if len(tokens) == 1: - elements.append(MyPageBreak(breakTo='even')) - else: - elements.append(MyPageBreak(tokens[1],breakTo='even')) - elif command == 'OddPageBreak': - if len(tokens) == 1: - elements.append(MyPageBreak(breakTo='odd')) - else: - elements.append(MyPageBreak(tokens[1],breakTo='odd')) - elif command == 'FrameBreak': - if len(tokens) == 1: - elements.append(CondPageBreak(99999)) - else: - elements.append(CondPageBreak(float(tokens[1]))) - elif command == 'Spacer': - elements.append(MySpacer(adjustUnits(tokens[1]), - adjustUnits(tokens[2]))) - elif command == 'Transition': - elements.append(Transition(*tokens[1:])) - elif command == 'SetPageCounter': - elements.append(flowables.PageCounter(*tokens[1:])) - else: - log.error('Unknown command %s in raw pdf directive [%s]'%(command,nodeid(node))) - return elements - -from reportlab.lib.colors import Color, CMYKColor, getAllNamedColors, toColor, \ - HexColor - -HAS_XHTML2PDF = True -try: - from xhtml2pdf.util import COLOR_BY_NAME - from xhtml2pdf.util import memoized - from xhtml2pdf.context import pisaContext - from xhtml2pdf.default import DEFAULT_CSS - from xhtml2pdf.parser import pisaParser,pisaGetAttributes - from xhtml2pdf.document import pisaStory - from reportlab.platypus.flowables import Spacer - from reportlab.platypus.frames import Frame - from xhtml2pdf.xhtml2pdf_reportlab import PmlBaseDoc, PmlPageTemplate - from xhtml2pdf.util import pisaTempFile, getBox, pyPdf - import xhtml2pdf.parser as pisa_parser -except ImportError: - try: - from sx.pisa3.pisa_util import COLOR_BY_NAME - memoized = lambda *a: a - from sx.pisa3.pisa_context import pisaContext - from sx.pisa3.pisa_default import DEFAULT_CSS - from sx.pisa3.pisa_parser import 
pisaParser,pisaGetAttributes - from sx.pisa3.pisa_document import pisaStory - from reportlab.platypus.flowables import Spacer - from reportlab.platypus.frames import Frame - from sx.pisa3.pisa_reportlab import PmlBaseDoc, PmlPageTemplate - from sx.pisa3.pisa_util import pisaTempFile, getBox, pyPdf - import sx.pisa3.pisa_parser as pisa_parser - except ImportError: - HAS_XHTML2PDF = False - - -if HAS_XHTML2PDF: - - COLOR_BY_NAME['initial'] = Color(0, 0, 0) - - - @memoized - def getColor2(value, default=None): - """ - Convert to color value. - This returns a Color object instance from a text bit. - """ - - if isinstance(value, Color): - return value - value = str(value).strip().lower() - if value == "transparent" or value == "none": - return default - if value in COLOR_BY_NAME: - return COLOR_BY_NAME[value] - if value.startswith("#") and len(value) == 4: - value = "#" + value[1] + value[1] + value[2] + value[2] + value[3] + value[3] - elif rgb_re.search(value): - # e.g., value = "", go figure: - r, g, b = [int(x) for x in rgb_re.search(value).groups()] - value = "#%02x%02x%02x" % (r, g, b) - else: - # Shrug - pass - return toColor(value, default) # Calling the reportlab function - - #import xhtml2pdf.util - #xhtml2pdf.util.getColor = getColor2 - - import cgi - import logging - from xml.dom import Node - - - - def pisaPreLoop2(node, context, collect=False): - """ - Collect all CSS definitions - """ - - data = u"" - if node.nodeType == Node.TEXT_NODE and collect: - data = node.data - - elif node.nodeType == Node.ELEMENT_NODE: - name = node.tagName.lower() - - # print name, node.attributes.items() - if name in ("style", "link"): - attr = pisaGetAttributes(context, name, node.attributes) - print " ", attr - media = [x.strip() for x in attr.media.lower().split(",") if x.strip()] - # print repr(media) - - if (attr.get("type", "").lower() in ("", "text/css") and ( - not media or - "all" in media or - "print" in media or - "pdf" in media)): - - if name == "style": - for node in node.childNodes: - data += pisaPreLoop2(node, context, collect=True) - #context.addCSS(data) - return u"" - #collect = True - - if name == "link" and attr.href and attr.rel.lower() == "stylesheet": - # print "CSS LINK", attr - context.addCSS('\n@import "%s" %s;' % (attr.href, ",".join(media))) - # context.addCSS(unicode(file(attr.href, "rb").read(), attr.charset)) - #else: - # print node.nodeType - - for node in node.childNodes: - result = pisaPreLoop2(node, context, collect=collect) - if collect: - data += result - - return data - - - pisa_parser.pisaPreLoop = pisaPreLoop2 - - - HTML_CSS = """ - html { - font-family: Helvetica; - font-size: 7px; - font-weight: normal; - color: #000000; - background-color: transparent; - margin: 0; - padding: 0; - line-height: 150%; - border: 1px none; - display: inline; - width: auto; - height: auto; - white-space: normal; - } - - b, - strong { - font-weight: bold; - } - - i, - em { - font-style: italic; - } - - u { - text-decoration: underline; - } - - s, - strike { - text-decoration: line-through; - } - - a { - text-decoration: underline; - color: blue; - } - - ins { - color: green; - text-decoration: underline; - } - del { - color: red; - text-decoration: line-through; - } - - pre, - code, - kbd, - samp, - tt { - font-family: "Courier New"; - } - - h1, - h2, - h3, - h4, - h5, - h6 { - font-weight:bold; - -pdf-outline: true; - -pdf-outline-open: false; - } - - h1 { - /*18px via YUI Fonts CSS foundation*/ - font-size:138.5%; - -pdf-outline-level: 0; - } - - h2 { - /*16px via YUI Fonts CSS 
foundation*/ - font-size:123.1%; - -pdf-outline-level: 1; - } - - h3 { - /*14px via YUI Fonts CSS foundation*/ - font-size:108%; - -pdf-outline-level: 2; - } - - h4 { - -pdf-outline-level: 3; - } - - h5 { - -pdf-outline-level: 4; - } - - h6 { - -pdf-outline-level: 5; - } - - h1, - h2, - h3, - h4, - h5, - h6, - p, - pre, - hr { - margin:1em 0; - } - - address, - blockquote, - body, - center, - dl, - dir, - div, - fieldset, - form, - h1, - h2, - h3, - h4, - h5, - h6, - hr, - isindex, - menu, - noframes, - noscript, - ol, - p, - pre, - table, - th, - tr, - td, - ul, - li, - dd, - dt, - pdftoc { - display: block; - } - - table { - } - - tr, - th, - td { - - vertical-align: middle; - width: auto; - } - - th { - text-align: center; - font-weight: bold; - } - - center { - text-align: center; - } - - big { - font-size: 125%; - } - - small { - font-size: 75%; - } - - - ul { - margin-left: 1.5em; - list-style-type: disc; - } - - ul ul { - list-style-type: circle; - } - - ul ul ul { - list-style-type: square; - } - - ol { - list-style-type: decimal; - margin-left: 1.5em; - } - - pre { - white-space: pre; - } - - blockquote { - margin-left: 1.5em; - margin-right: 1.5em; - } - - noscript { - display: none; - } - """ - - def parseHTML(data, node): - dest=None - path=None - link_callback=None - debug=0 - default_css=HTML_CSS - xhtml=False - encoding=None - xml_output=None - raise_exception=True - capacity=100*1024 - - # Prepare simple context - context = pisaContext(path, debug=debug, capacity=capacity) - context.pathCallback = link_callback - - # Build story - context = pisaStory(data, path, link_callback, debug, default_css, xhtml, - encoding, context=context, xml_output=xml_output) - return context.story - -else: # no xhtml2pdf - def parseHTML(data, none): - log.error("You need xhtml2pdf installed to use the raw HTML directive.") - return [] diff --git a/dist-packages/rst2pdf/rst2pdf/writer.py b/dist-packages/rst2pdf/rst2pdf/writer.py deleted file mode 100644 index 84ad7eb3e..000000000 --- a/dist-packages/rst2pdf/rst2pdf/writer.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# See LICENSE.txt for licensing terms - -from StringIO import StringIO - -from docutils import writers - -from rst2pdf import createpdf - - -class PdfWriter(writers.Writer): - - def __init__(self, builder): - writers.Writer.__init__(self) - self.builder = builder - self.output = u'' - - supported = ('pdf') - """Formats this writer supports.""" - - config_section = 'pdf writer' - config_section_dependencies = ('writers') - """Final translated form of `document`.""" - - def translate(self): - sio = StringIO('') - createpdf.RstToPdf(sphinx=True).createPdf( - doctree=self.document, output=sio, compressed=False) - self.output = unicode(sio.getvalue(), 'utf-8', 'ignore') - - def supports(self, format): - """This writer supports all format-specific elements.""" - return 1 diff --git a/dist-packages/rst2pdf/setup.cfg b/dist-packages/rst2pdf/setup.cfg deleted file mode 100644 index ee7b0e093..000000000 --- a/dist-packages/rst2pdf/setup.cfg +++ /dev/null @@ -1,3 +0,0 @@ -[egg_info] -tag_build = .dev -tag_svn_revision = true diff --git a/dist-packages/rst2pdf/setup.py b/dist-packages/rst2pdf/setup.py deleted file mode 100644 index 0610c21fd..000000000 --- a/dist-packages/rst2pdf/setup.py +++ /dev/null @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- -#$HeadURL$ -#$LastChangedDate$ -#$LastChangedRevision$ - -import os -from setuptools import setup, find_packages - -version = '0.93' - -def read(*rnames): - return 
open(os.path.join(os.path.dirname(__file__), *rnames)).read() - -long_description = ( - read('LICENSE.txt') - + '\n' + - 'Detailed Documentation\n' - '**********************\n' - + '\n' + - read('README.rst') - + '\n' + - 'Contributors\n' - '************\n' - - - + '\n' + - read('Contributors.txt') - + '\n' + - 'Change history\n' - '**************\n' - + '\n' + - read('CHANGES.txt') - + '\n' + - 'Download\n' - '********\n' - ) - -install_requires = [ - 'setuptools', - 'docutils', - 'reportlab>=2.4', - 'Pygments', - 'pdfrw', - ] - -try: - import json -except ImportError: - install_requires.append('simplejson') - -tests_require = ['pyPdf'] -sphinx_require = ['sphinx'] -hyphenation_require = ['wordaxe>=1.0'] -images_require = ['PIL'] -pdfimages_require = ['pyPdf','PythonMagick'] -pdfimages2_require = ['pyPdf','SWFTools'] -svgsupport_require = ['svg2rlg'] -aafiguresupport_require = ['aafigure>=0.4'] -mathsupport_require = ['matplotlib'] -rawhtmlsupport_require = ['xhtml2pdf'] - -setup( - name="rst2pdf", - version=version, - packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), - package_data=dict(rst2pdf=['styles/*.json', - 'styles/*.style', - 'images/*png', - 'images/*jpg', - 'templates/*tmpl' - ]), - include_package_data=True, - dependency_links=[ - ], - install_requires=install_requires, - tests_require=tests_require, - extras_require=dict( - tests=tests_require, - sphinx=sphinx_require, - hyphenation=hyphenation_require, - images=images_require, - pdfimages=pdfimages_require, - pdfimages2=pdfimages2_require, - svgsupport=svgsupport_require, - aafiguresupport=aafiguresupport_require, - mathsupport=mathsupport_require, - rawhtmlsupport=rawhtmlsupport_require, - ), - # metadata for upload to PyPI - # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 4 - Beta', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: MIT License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Topic :: Documentation', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Topic :: Text Processing', - 'Topic :: Utilities', - ], - author="Roberto Alsina", - author_email="ralsina at netmanagers dot com dot ar", - description="Convert restructured text to PDF via reportlab.", - long_description=long_description, - license="MIT", - keywords="restructured convert rst pdf docutils pygments reportlab", - url="http://rst2pdf.googlecode.com", - download_url="http://code.google.com/p/rst2pdf/downloads/list", - entry_points={'console_scripts': ['rst2pdf = rst2pdf.createpdf:main']}, - test_suite='rst2pdf.tests.test_rst2pdf.test_suite', -) diff --git a/dist-packages/wordaxe/.gitignore b/dist-packages/wordaxe/.gitignore deleted file mode 100644 index 0d20b6487..000000000 --- a/dist-packages/wordaxe/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.pyc diff --git a/dist-packages/wordaxe/LICENSE b/dist-packages/wordaxe/LICENSE deleted file mode 120000 index 7ca850b69..000000000 --- a/dist-packages/wordaxe/LICENSE +++ /dev/null @@ -1 +0,0 @@ -docs/license.txt \ No newline at end of file diff --git a/dist-packages/wordaxe/README.txt b/dist-packages/wordaxe/README.txt deleted file mode 120000 index fcf29580a..000000000 --- a/dist-packages/wordaxe/README.txt +++ /dev/null @@ -1 +0,0 @@ -docs/install.txt \ No newline at end of file diff --git a/dist-packages/wordaxe/setup.py b/dist-packages/wordaxe/setup.py deleted file mode 100755 index eaf31bc79..000000000 --- 
a/dist-packages/wordaxe/setup.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python -# -*- coding: UTF-8 -*- - -from distutils.core import setup - - -setup( - name = "wordaxe", - version = "1.0.1", - description = "Provide hyphenation for python programs and ReportLab paragraphs.", - long_description = "Provide hyphenation for python programs and ReportLab paragraphs.", - author = "Henning von Bargen", - author_email = "henning.vonbargen@arcor.de", - maintainer = "Henning von Bargen", - maintainer_email = "henning.vonbargen@arcor.de", - license = ["Apache License, version 2.0", "Free BSD License"], - platforms = ["Unix", "Windows", "generic"], - keywords = ["multi-language", "text processing", "hyphenation", "paragraphs", "reportlab"], - url = "http://deco-cow.sourceforge.net", - download_url = "http://sourceforge.net/project/platformdownload.php?group_id=105867", - packages = ["wordaxe", "wordaxe/rl", "wordaxe/plugins", "wordaxe/dict"], - package_data = {"wordaxe": ["dict/*.dic" ]} -) - - -# Backup original Reportlab's file rl_codecs.py -> rl_codecs.py.bak -# and replace the original with the one in hyphenation/rl if needed. - -import sys - -setupCommand = sys.argv[-1] - -if setupCommand == "install": - - from shutil import copy2 - try: - from hashlib import md5 - except ImportError: # Python < 2.5 - from md5 import new as md5 - - def fileHash(path): - """Return MD5 hash of an entire file.""" - h = md5() - h.update(open(path, "rb").read()) - return h.hexdigest() - - try: - import reportlab - if reportlab.Version <= "2.3": - from reportlab.pdfbase import rl_codecs - src = rl_codecs.__file__ - if src.endswith(".pyc"): - src = src[:-1] - new = "wordaxe/rl/rl_codecs.py" - if fileHash(src) != fileHash(new): - bak = src + ".bak" - print "backing up %s -> %s" % (src, bak) - copy2(src, bak) - print "copying %s -> %s" % (new, src) - copy2(new, src) - else: - print "no update of '%s' needed" % src - except ImportError: - print "Note: ReportLab is not properly installed." - diff --git a/dist-packages/wordaxe/wordaxe/BaseHyphenator.py b/dist-packages/wordaxe/wordaxe/BaseHyphenator.py deleted file mode 100755 index f4eadc1b9..000000000 --- a/dist-packages/wordaxe/wordaxe/BaseHyphenator.py +++ /dev/null @@ -1,336 +0,0 @@ -#!/usr/bin/env python -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' - -__doc__=''' -The BaseHyphenator is the most basic hyphenator and should work for -any language. -''' - -import sys -import logging -logging.basicConfig() -log = logging.getLogger("BaseHyphenator") -log.setLevel(logging.INFO) - -from xml.sax.saxutils import escape,quoteattr -import codecs -from wordaxe.hyphen import * - -class Stripper: - """ - A helper class for stripping words. - """ - STD_PREFIX_CHARS = u"""'"([{¿""" - STD_SUFFIX_CHARS = u"""'')]}?!.,;:""" - - def __init__(self, prefix_chars=STD_PREFIX_CHARS, - suffix_chars=STD_SUFFIX_CHARS): - self.prefix_chars = prefix_chars - self.suffix_chars = suffix_chars - - def strip(self, word): - """ - Returns a tuple (prefix, base, postfix) - such that word = prefix+base+postfix. 
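The Stripper described above only peels leading and trailing punctuation off a word before hyphenation and reattaches it afterwards. A standalone sketch of that behaviour is given below; the character sets are abridged from STD_PREFIX_CHARS/STD_SUFFIX_CHARS, and the function name and sample word are illustrative.

    PREFIX_CHARS = "'\"([{"
    SUFFIX_CHARS = "')]}?!.,;:"

    def strip_word(word):
        """Return (prefix, base, suffix) such that word == prefix + base + suffix."""
        left, right = 0, len(word)
        while left < right and word[left] in PREFIX_CHARS:
            left += 1
        while right > left and word[right - 1] in SUFFIX_CHARS:
            right -= 1
        return word[:left], word[left:right], word[right:]

    assert strip_word('(example?') == ('(', 'example', '?')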
- """ - lenword = len(word) - offs_l = 0 - offs_r = lenword - while offs_l < lenword and word[offs_l] in self.prefix_chars: - offs_l += 1 - while offs_r > offs_l and word[offs_r-1] in self.suffix_chars: - offs_r -= 1 - return word[:offs_l], word[offs_l:offs_r], word[offs_r:] - - def apply_stripped(self, func, hyphenator, word, *args, **kwargs): - """ - Apply a hyphenation function for a word, - but strips prefix and suffix characters before. - Afterwards, these are added again. - """ - assert isinstance(word, unicode) - prefix, base, suffix = self.strip(word) - if func.im_self is None: - result = func(hyphenator, base, *args, **kwargs) - else: - assert func.im_self is hyphenator - result = func(base, *args, **kwargs) - if result is None: - return None - if isinstance(result, HyphenatedWord): - if prefix: - result = result.prepend(prefix) - if suffix: - result = result.append(suffix) - else: - result = prefix + result + suffix - return result - -# Test -assert Stripper().strip(u"(Wie denn?") == (u"(", u"Wie denn", u"?") - - -class BaseHyphenator(Hyphenator): - """ - This hyphenator is the most basic hyphenator which should work for - any language. - It only hyphenates a word after one of the following characters: - - minus sign (45, '\x2D') - . dot (46, '\x2E') (depending on its position) - _ underscore (95, '\x5F') - ­ shy hyphenation character (173, '\xAD'). - - Optionally, hyphenation points can be added for CamelCase words - (CamelCase => Camel-Case). - """ - - stripper = Stripper() # Mit den Standard-Einstellungen - - def hyph(self,word): - """ - This is the non-recursive hyphenation function. - """ - #print "BaseHyphenator hyph", word - hword = HyphenatedWord(word, hyphenations=[]) - # strip common prefix- and suffix-characters - l = len(word) - if l < self.minWordLength: - return hword - # @TODO better use a regular expression for number/date detection - hyphenations = [] - for p in range(1,l-1): - if word[p] in [u"-", self.shy]: - hyphenations.append(HyphenationPoint(p+1,9,0,u"",0,u"")) - elif word[p] in ".,": - if word[p-1] in u"-+0123456789" and p+1 - -Silbentrennung - - - -

Silbentrennung Trennungstest

-

-""") - for w in wordlist: - if w=="\n": - if htmlFile: htmlFile.write("

\n

") - continue - # enthält das Wort mindestens zwei verschiedene Buchstaben? - wlower = w.lower() - nletters = 0 - for ch in letters: - if ch in wlower: - nletters += 1 - if nletters >= 2: - cntWords += 1 - if len(w) < self.minWordLength: - cntTooShort += 1 - if htmlFile: - htmlFile.write("%s\n" % escape(w)) - else: - #print w - loesung = self.hyphenate(w) - if loesung is None: - if wlower not in unknownWords: - unknownWords.append(wlower) - elif loesung.hyphenations: - cntOK += 1 - if htmlFile: - x = w - ins=0 - for h in loesung.hyphenations: - if h.nl==0 and h.sl==self.shy: - x = x[:ins+h.indx]+h.sl+x[ins+h.indx:] - ins += 1 - htmlFile.write("%s\n" % (quoteattr("ok"), quoteattr(str(loesung.hyphenations)), x)) - elif VERBOSE: - print w, loesung.hyphenations - else: - if htmlFile: - htmlFile.write("%s\n" % (quoteattr("nicht trennbar"), quoteattr(hint), escape(w))) - elif VERBOSE: - print w, "Nicht trennbar:", repr(loesung) - if htmlFile: - htmlFile.write("

") - - return (cntWords, cntOK, cntTooShort, unknownWords) - - - def test(self, encoding="iso-8859-1", outfname="out.html"): - """ - Testfunktion (Aufruf aus einem Hauptprogramm). - """ - import sys - import time - wortliste = [] - args = sys.argv[1:] - - runs = 1 - out = None - verbose = False - while len(args): - w,args = args[0],args[1:] - if w=="-v": - verbose = True - elif w=="-r": - runs,args = int(args[0]), args[1:] - elif w=="-g": - GENHTML = True - out = codecs.open(outfname, "wt", encoding) - elif w=="-f": - fname,args = args[0],args[1:] - for zeile in codecs.open(fname,"rt", encoding): - spl = zeile.split() - wortliste += spl - if not spl: wortliste.append("\n") - else: - wortliste.append(w.decode(encoding)) - print "%r" % wortliste - - #timer = timeit.Timer(stmt="result=h.learn(wortliste)") - #timer.timeit(runs) - startzeit = time.clock() - for x in range(runs): - print "run %d" % x - result = self.learn(wortliste, VERBOSE=verbose, htmlFile=out) - endezeit = time.clock() - cntWords,cntOK,cntTooShort,unknownWords = result - - print """ - - Statistics: - ----------- - Words processed :%6d - Bytes processed :%6d - Short words :%6d - Unknown words :%6d - """ % (cntWords, sum(map(len,wortliste)), cntTooShort, len(unknownWords)) - - secs = endezeit-startzeit - print "%d runs" % runs - print "Time :%3.3f seconds" % secs - if cntWords: - print " = %1.6f secs per 1000 words and run" % (1000* secs / (runs * cntWords)) - - unknownWords.sort() - print "unknown words (sorted):" - w_ = None - for w in unknownWords: - if w!=w_: - if len(w)>6: print "%-15.15s" % w, - w_ = w - - - def testWordList(self, fname, encoding, error): - """ - Test a file containing a list of word and hyphenated word in the form - someword some-word - E.g. each line is a test case. - The test outputs those words where the hyphenated version does not match - the expected output. 
- """ - for zeile in codecs.open(fname,"rt", encoding): - assert isinstance(zeile,unicode) - word, expected = zeile.strip().split() - loesung = self.hyphenate(word) - if loesung is None: - output = word # unknown word - elif loesung.hyphenations: - x = word - ins=0 - for h in loesung.hyphenations: - h.sl = h.sl.replace(SHY, "-") - ###if h.nl==0 and h.sl==self.shy: - x = x[:ins+h.indx]+h.sl+x[ins+h.indx:] - ins += len(h.sl) - h.nl - output = x - else: - output = word - if output != expected: - error ("%r: output=%r but expected=%r", word, output, expected) - - -if __name__=="__main__": - print "Testing BaseHyphenator:" - h = BaseHyphenator("DE",5, CamelCase=True) - if len(sys.argv) > 1: - h.test(outfname="ExplicitLearn.html") - sys.exit() - #assert h.hyphenate("Exklusiv-Demo") == [(9,9,None)] - for word in ["Exklusiv-Demo", "CamelCase", "18.10.2003", "1,2345", "i.e", "z.B.", "-0.1234", "reportlab-users", "no_data_found", "-12345", "12345-", "1-2345", "1234-5"]: - hyphWord = h.hyphenate(word.decode("iso-8859-1")) - if hyphWord is not None: - print "%s -- %s" % (word, hyphWord.showHyphens()) - else: - print "%s -- unknown" % word - - hw = HyphenatedWord("Schiffahrtskapitänsbackenzahn".decode("iso-8859-1"), []) - # 0 1 2 - # 01234567890123456789012345678 - hw.hyphenations = [HyphenationPoint(5,8,0,u"f"+SHY,0,u""), #schif(f)-fahrt - #HyphenationPoint(6,8,0,SHY,0,u"f"), # eine Alternative Darstellung mit gleichem Ergebnis - HyphenationPoint(11,9,0,SHY,0,u""), #fahrts-ka - HyphenationPoint(13,4,0,SHY,0,u""), #ka-pi - HyphenationPoint(15,4,0,SHY,0,u""), #pi-täns - HyphenationPoint(19,9,0,SHY,0,u""), #täns-ba - HyphenationPoint(22,4,1,u"k"+SHY,0,""), # bak-ken nach alten Regeln - HyphenationPoint(25,9,0,SHY,0,u""), # ken-zahn - ] - print "%s -- %s" % (hw,hw.showHyphens()) - while len(hw.hyphenations): - l,r = hw.split(hw.hyphenations[0]) - print "%s%s" % (l,r) - hw=r diff --git a/dist-packages/wordaxe/wordaxe/DCWHyphenator.py b/dist-packages/wordaxe/wordaxe/DCWHyphenator.py deleted file mode 100755 index 0dabbb4de..000000000 --- a/dist-packages/wordaxe/wordaxe/DCWHyphenator.py +++ /dev/null @@ -1,756 +0,0 @@ -#!/usr/bin/env python -# -*- coding: UTF-8 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' - -import os,sys -import copy -import operator -import codecs - -from wordaxe.hyphen import SHY,HyphenationPoint,HyphenatedWord -import time -from wordaxe.BaseHyphenator import Stripper, BaseHyphenator -from wordaxe.ExplicitHyphenator import ExplicitHyphenator - -from wordaxe.hyphrules import HyphRule, RULES, AlgorithmError - -from wordaxe.hyphrules import NO_CHECKS,StringWithProps,Prefix,Root,Suffix -from wordaxe.hyphrules import TRENNUNG,NO_SUFFIX,KEEP_TOGETHER -import wordaxe.dict.DEhyph as DEhyph - -DEBUG=0 - -import logging -logging.basicConfig() -log = logging.getLogger("DCW") -log.setLevel(logging.INFO) -if DEBUG: - log.setLevel(logging.DEBUG) - -class WordFrag: - """Helper class for a (partially) parsed WordFrag. - A WordFrag is made up from prefix_chars, prefix, root, suffix, and suffix_chars, - i.e. the german word "(unveränderbarkeit)!" - is a WordFrag ( "(", ["un","ver"], "änder", ["bar","keit"], ")!" ). 
- """ - - def __init__(self,konsonantenverkuerzung_3_2=False): - self.konsonantenverkuerzung_3_2 = konsonantenverkuerzung_3_2 - self.prefix_chars = "" - self.prefix = [] - self.root = None - self.suffix = None - self.suffix_chars = "" - self.checks = [[],[],[],[],[],[]] - - def isValid(self): - "Is the WordFrag (stand alone) a valid word?" - return False - - def __str__(self): - "String representation" - return self.__class__.__name__ - - def __repr__(self): - return self.__str__() - - def clone(self): - return copy.copy(self) - -class PrefixWordFrag(WordFrag): - """A WordFrag that does not yet contain the root. - """ - def __init__(self,tw,prefix_chars="",prefix=[]): - if tw is None: tw = WordFrag() - # Auch alle sonstigen Attribute der Vorlage mit übernehmen - # @TODO Dieser Code ist wirklich hässlich: - self.__dict__.update(tw.__dict__) - WordFrag.__init__(self,konsonantenverkuerzung_3_2=tw.konsonantenverkuerzung_3_2) - self.prefix_chars = prefix_chars or tw.prefix_chars - self.prefix = prefix or tw.prefix - - def __str__(self): - "String representation" - return "PrefixWF " + self.prefix_chars + "-".join([p.strval for p in self.prefix]) - - def clone(self): - n = copy.copy(self) - n.prefix = self.prefix[:] - return n - -class SuffixWordFrag(PrefixWordFrag): - """A WordFrag that does contain a root and eventually a suffix. - """ - def __init__(self,tw,root=None,suffix=[],suffix_chars=[]): - if tw is None: tw = PrefixWordFrag(None,[]) - PrefixWordFrag.__init__(self,tw) - self.root = root or tw.root - self.suffix = suffix - self.suffix_chars = suffix_chars - - def __str__(self): - "String representation" - return "SuffixWF " + self.prefix_chars + "-".join([p.strval for p in self.prefix]) + \ - "|" + self.root.strval + "|" + ":".join([s.strval for s in self.suffix]) + \ - (self.konsonantenverkuerzung_3_2 and "!3>2" or "") - - def clone(self): - n = copy.copy(self) - n.suffix = self.suffix[:] - - return n - - def isValid(self): - if not self.suffix: - for p in self.root.props: - if isinstance (p,NEED_SUFFIX): - return False - return True - -SWORD = SuffixWordFrag - -VOWELS = u"aeiouäöüy" - -ALTE_REGELN = False - -KONSTANTEN_VERKUERZUNG_3_2 = True - -VERBOSE = False - -GENHTML = False - -class DCWHyphenator(ExplicitHyphenator): - """ - Hyphenation by decomposition of composed words. - The German language has a lot of long words that are - composed of simple words. The German word - "Silbentrennung" (engl. hyphenation) is a good example - in itself. - It is a composition of the words "Silbe" (engl. syllable) - and "Trennung" (engl. "separation"). - Each simple word consists of 0 or more prefixes, 1 stem, - and 0 or more suffixes. - The principle of the algorithm is quite simple. - It uses a a base of known prefixes, stems and suffixes, - each of which may contain attributes that work as rules. - The rules define how these word fragments can be combined. - The algorithm then to decompose the whole word into a - series of simple words, where each simple word consists - of known fragments and fulfills the rules. - Then it uses another simple algorithm to hyphenate each - simple word. - For a given word, there may be more than one possible - decomposition into simple words. - The hyphenator only returns those hyphenation points that - ALL possible decompositions have in common. - - Note: - The algorithm has been inspired by the publications about - "Sichere sinnentsprechende Silbentrennung" from the - technical university of Vienna, Austria (TU Wien). 
- However, it is in no other way related to the closed-source - software "SiSiSi" software developed at the TU Wien. - For more information about the "SiSiSi" software, see the - web site "http://www.ads.tuwien.ac.at/research/SiSiSi/". - """ - - def __init__ (self, - language="DE", - minWordLength=4, - qHaupt=8, - qNeben=5, - qVorsilbe=5, - qSchlecht=3, - hyphenDir=None, - **options - ): - ExplicitHyphenator.__init__(self,language=language,minWordLength=minWordLength, **options) - - # Qualitäten für verschiedene Trennstellen - self.qHaupt=qHaupt - self.qNeben=qNeben - self.qVorsilbe=qVorsilbe - self.qSchlecht=qSchlecht - - # Stammdaten initialisieren - special_words = [] - self.roots = [] - self.prefixes = [] - self.suffixes = [] - self.prefix_chars = DEhyph.prefix_chars - self.suffix_chars = DEhyph.suffix_chars - self.maxLevel=20 - - # Statistikdaten initialisieren - self.numStatesExamined = 0 - - # [special_words] einlesen - for zeile in DEhyph.special_words.splitlines(): - # Leerzeilen und Kommentare überspringen - zeile = zeile.strip() - if not zeile or zeile.startswith("#"): - continue - if "=" in zeile: - word, trennung = zeile.split("=") - else: - zeile = zeile.split(",") - word = zeile.pop(0) - assert len(zeile) >= 1 - for attr in zeile: - if ":" in attr: - propnam, propval = attr.split(":") - else: - propnam, propval = attr, "" - if propnam == u"TRENNUNG": - trennung = propval - elif propnam == u"KEEP_TOGETHER": - trennung = word - else: - raise NameError("Unknown property for word %s: %s" % (word, propnam)) - pass # Attribut ignorieren - self.add_entry(word, trennung) - - - # roots, prefixes und suffixes einlesen. - # Bei diesen können noch - Komma-getrennt - Eigenschaften angegeben sein. - # Eine Eigenschaft hat die Form XXX oder XXX:a,b,c - for name in ["roots", "prefixes", "suffixes"]: - abschnitt = getattr(self, name) - zeilen = getattr(DEhyph, name) - assert isinstance(zeilen, unicode) - for zeile in zeilen.splitlines(): - # Leerzeilen und Kommentare überspringen - zeile = zeile.strip() - if not zeile or zeile.startswith("#"): - continue - # Aufteilen in word und props - zeile = zeile.split(",") - word = zeile.pop(0) - props = [] - if len(zeile) >= 1: - for attr in zeile: - if ":" in attr: - [propnam,propval] = attr.split(":") - else: - propnam = attr - propval = "" - try: - cls = RULES[propnam] - props.append(cls(propval)) # the class is the propnam - except KeyError: - raise NameError("Unknown property for word %s: %s" % (word,propnam)) - # Jeder abschnitt ist eine Liste von Tupeln (lae, L), wobei L - # ein Dictionary von Wörtern der Länge lae ist und dazu die Liste - # der möglichen Eigenschaften enthält (dasselbe Wort kann je nach - # Bedeutung unterschiedliche Eigenschaften haben). - lenword = len(word) - for (lae,L) in abschnitt: - if lae==lenword: - try: - L[word].append(props) - except KeyError: - L[word]=[props] - break - else: - abschnitt.append((lenword,{word:[props]})) - self.stripper = Stripper(self.prefix_chars, self.suffix_chars) - - def _zerlegeWort(self,zusgWort): - """" - Returns a list containing all possible decompositions. - The decomposition routine works as follows: - - A TODO list contains the cases that still have to be considered. - Each element in this list is a tuple - (cword,frag,remainder,checks) characterising the state precisely. - - Notation: - CWORD = compound word, a list of SWORDs - SWORD = simple word = prefix* root suffix* - - cword is a list containing the already parsed SWORDs. - frag is a fragment of the current SWORD. 
- remainder is the remainder of the unparsed words. - checks describes the checks we still have to do. - - A solution list contains the solutions found so far - (it is empty in the beginning). - - In the beginning, the TODO-list contains only one element, - the initial status: - ([], None, zusgWort, []) - - For the word "Wegbeschreibung", a status could - look like this: - ( [ SWORD([],Root("Weg"),[]) ], - SuffixWordFrag ([Prefix("be")],Root("schreib"),[]), - "ung", - [] - ) - - If the TODO list is empty, the solutions found are returned. - - Otherwise, one element of the list is removed and examined. - Depending on the frag, we try all possible extensions of the - frag with a prefix,root or postfix. - If a continuation is possible, then the continued frag - and is appended to the TODO list. - """ - - def mergeChecks(c1,c2): - """Create a new list of checks from c1 and c2 - """ - return map(operator.__add__,c1,c2) - - def do_check_frag(when,cword,frag,checks): - """Run the PRE_WORD or PRE_NEXT_WORD checks before appending frag to cword. - """ - for chk in checks[when]: - try: - if not chk.check(cword,when,frag): - #log.debug ("check (chk=%r, when=%d) failed for frag %r", chk, when, frag) - return False - except AlgorithmError: - log.error ("check %s when=%d : AlgorithmError for cword=%r, frag=%r", chk, when, cword, frag) - return False - return True - - def do_check_piece(when,frag,piece,checks): - """Run the PRE_PIECE or PRE_NEXT_PIECE checks before appending piece to frag. - """ - for chk in checks[when]: - if not chk.check(frag,when,piece): - log.debug ("check (chk=%r, when=%d) failed for piece %r", chk, when, piece.strval) - return False - return True - - def check_PRE_WORD(cword,frag,checks): - return do_check_frag(HyphRule.PRE_WORD,cword,frag,checks) - - def check_PRE_NEXT_WORD(cword,frag,checks): - return do_check_frag(HyphRule.PRE_NEXT_WORD,cword,frag,checks) - - def check_AT_END(cword,checks): - return do_check_frag(HyphRule.AT_END,cword,None,checks) - - def check_PRE_PIECE(frag,piece,checks): - return do_check_piece(HyphRule.PRE_PIECE,frag,piece,checks) - - def check_PRE_NEXT_PIECE(frag,piece,checks): - return do_check_piece(HyphRule.PRE_NEXT_PIECE,frag,piece,checks) - - def check_PRE_ROOT(frag,piece,checks): - return do_check_piece(HyphRule.PRE_ROOT,frag,piece,checks) - - # Initialization - solutions = [] - todo = [] - state = ( [], None, zusgWort, NO_CHECKS()) - todo.append (state) - - while todo: - - #log.debug ("todo=\n%r", todo) - - # Consider the next state - state = todo.pop() - (cword,frag,remainder,checks) = state - - log.debug ("Examining state: %r", state) - self.numStatesExamined += 1 - - # check if the SWORD can end here - if frag and frag.root \ - and check_PRE_WORD(cword,frag,checks) \ - and check_PRE_NEXT_WORD(cword,frag,checks): - #### log.warn ("@TODO: The above IF statement is DEFINITELY wrong - frag: %r", frag) - #### Ich bin mir da nicht mehr so sicher, es scheint doch richtig zu sein. - - #log.debug ("Since fragment has a root, add test with None.") - newChecks = NO_CHECKS() - newChecks[HyphRule.AT_END] = checks[HyphRule.AT_END] - todo.append( (cword+[frag],None,remainder,newChecks) ) - - if remainder=="": # we have reached the end of the word. - - if frag is None: # good, we have no incomplete fragment - - if check_AT_END(cword,checks): # the last checks are ok - log.debug ("found solution: %r", cword) - solutions.append(cword) - else: - pass - log.debug ("check_AT_END failed for %r", cword) - - else: # we have a fragment of an SWORD. 
- pass - #log.debug ("Incomplete or invalid sword fragment found at end of string.\n" + - # "We should already have added the case where fragment is None\n" + - # "to our todo list, so we just can skip this case: %r", frag) - - - else: # still more characters to parse - - if frag is None: - - log.debug ("frag is None, remainder=%r bei zerlegeWort %r", remainder, zusgWort) - - # check prefix characters - l = 0 - while l0: - ###HVB, 14.10.2006 geändert - ###newfrag = frag.clone() - ###newfrag.prefix_chars = remainder[:l] - ###r = remainder[l:] - ###todo.append ( (cword,newfrag,r,checks) ) - ###continue # do not examine the current state any more. - newfrag = PrefixWordFrag(None, prefix_chars=remainder[:l]) - r = remainder[l:] - todo.append ( (cword,newfrag,r,checks) ) - continue # do not examine the current state any more. - else: - # we need a fragment (even if it is empty) from here on. - frag = PrefixWordFrag(None) - - if not frag.root: # fragment has not yet a root. - - # check all possible prefixes. - #log.debug ("checking prefixes.") - for (lae,L) in self.prefixes: - l,r = remainder[:lae],remainder[lae:] - for eigenschaften in L.get(l,[]): - #log.debug ("trying prefix: %s with properties: %s", l,eigenschaften) - piece = Prefix(l,eigenschaften) - pChecks = piece.getChecks() - if check_PRE_PIECE(frag,piece,pChecks): - if check_PRE_NEXT_PIECE(frag,piece,checks): - # @TODO perhaps the next few lines could be faster and more elegant - newChecks = mergeChecks(checks,pChecks) - newChecks[HyphRule.PRE_PIECE] = [] - newChecks[HyphRule.PRE_NEXT_PIECE] = pChecks[HyphRule.PRE_NEXT_PIECE] - newfrag = copy.copy(frag) - newfrag.prefix = frag.prefix + [piece] - todo.append( (cword,newfrag,r,newChecks) ) - else: - pass # pre next piece checks failed - else: - pass # pre piece checks failed - - # check all possible roots. - #log.debug ("checking roots.") - for (lae,L) in self.roots: - l,r = remainder[:lae],remainder[lae:] - for eigenschaften in L.get(l,[]): - #log.debug ("trying root: %r with properties: %r", l,eigenschaften) - piece = Root(l,eigenschaften) - if check_PRE_ROOT(frag,piece,checks): - pChecks = piece.getChecks() - if check_PRE_PIECE(frag,piece,pChecks): - if check_PRE_NEXT_PIECE(frag,piece,checks): - # @TODO perhaps the next few lines could be faster and more elegant - newChecks = mergeChecks(checks,pChecks) - newChecks[HyphRule.PRE_PIECE] = [] - newChecks[HyphRule.PRE_NEXT_PIECE] = pChecks[HyphRule.PRE_NEXT_PIECE] - newfrag = SuffixWordFrag(frag,piece) - todo.append( (cword,newfrag,r,newChecks) ) - # Auch Verkürzung von 3 Konsonanten zu zweien berücksichtigen - if KONSTANTEN_VERKUERZUNG_3_2 and l[-1]==l[-2] and l[-1] not in VOWELS: - #log.debug ("konsonantenverkuerzung %s",l) - newChecks = mergeChecks(checks,pChecks) - newChecks[HyphRule.PRE_PIECE] = [] - # Konsonsantenverkürzung kommt nur bei Haupttrennstellen - # vor, nicht vor Suffixes. - newChecks[HyphRule.PRE_NEXT_PIECE] = [NO_SUFFIX()] + pChecks[HyphRule.PRE_NEXT_PIECE] - newPiece = Root(l,eigenschaften) - newfrag = SuffixWordFrag(frag,newPiece) - newfrag.konsonantenverkuerzung_3_2 = True - todo.append( (cword,newfrag,l[-1]+r,newChecks) ) - else: - pass # pre next piece checks failed - else: - pass # pre piece checks failed - else: # pre root checks failed - pass - - else: # fragment already has a root. - #log.debug ("checking suffixes.") - # check all possible suffixes. 
- for (lae,L) in self.suffixes: - l,r = remainder[:lae],remainder[lae:] - for eigenschaften in L.get(l,[]): - log.debug ("trying suffix: %r with properties: %s", l,eigenschaften) - piece = Suffix(l,eigenschaften) - pChecks = piece.getChecks() - if check_PRE_PIECE(frag,piece,pChecks): - if check_PRE_NEXT_PIECE(frag,piece,checks): - # @TODO perhaps the next few lines could be faster and more elegant - newChecks = mergeChecks(checks,pChecks) - newChecks[HyphRule.PRE_PIECE] = [] - newChecks[HyphRule.PRE_NEXT_PIECE] = pChecks[HyphRule.PRE_NEXT_PIECE] - newfrag = copy.copy(frag) - newfrag.suffix = frag.suffix + [piece] - todo.append( (cword,newfrag,r,newChecks) ) - - else: - log.debug("pre next piece checks failed") - pass # pre next piece checks failed - else: - log.debug("pre piece checks failed") - pass # pre piece checks failed - - # check suffix characters - if not frag.suffix_chars: - l = 0 - while l0: - newfrag = frag.clone() - newfrag.suffix_chars = remainder[:l] - r = remainder[l:] - if check_PRE_WORD(cword,frag,checks) \ - and check_PRE_NEXT_WORD(cword,frag,checks): - #log.debug ("@TODO: The above IF statement is definitely wrong.\n" + - # "We have to distinguish between the checks for CWORD and FRAG.\n" + - # "Thus it seems that we need TWO check variables.") - chks = NO_CHECKS(HyphRule.AT_END) + checks[HyphRule.AT_END:] - todo.append ( (cword+[newfrag],None,r,chks) ) - continue # do not examine the current state any more. - else: # checks failed - pass - else: # no suffix characters found - pass - else: - pass # we already have suffix characters. - - # Nothing more to do. - if VERBOSE: log.info ("returning %r", solutions) - return solutions - - # Hilfsfunktion - def schiebe(self,offset,L): - return [HyphenationPoint(h.indx+offset,h.quality,h.nl,h.sl,h.nr,h.sr) for h in L] - - def dudentrennung(self,wort,quality=None): - """ - The algorithm how to hyphenate a word - without knowing about the context. - - This code is quite specific to German! - For other languages, there may be totally different rules. - - This rule is known as "Ein-Konsonanten-Regel" in German. - The rule works (basically) as follows: - First, find the vowels in the word, - as they mark the syllables (one hyphenation point between - two vowels (but consider sequences of vowels counting as one). - If there are consonants between two vowels, - put all but the last consonant to the left syllable, - and only the last consonant to the right syllable - (therefore the name one-consonant-rule). - However, there are also sequences of consonants counting as one, - like "ch" or "sch". - """ - #print "dudentrennung: %s" % wort - if not quality: quality = self.qNeben - - assert isinstance(wort, unicode) - - # Jede Silbe muss mindestens einen Vokal enthalten - if len(wort) <= 2: - return [] - # Suche bis zum ersten Vokal - for vpos1 in range(len(wort)): - if wort[vpos1] in VOWELS: - if wort[vpos1-1:vpos1+1] != 'qu': - break - else: - # Kein Vokal enthalten! - return [] - # wort[vpos1] ist der erste Vokal - fertig = False - stpos = vpos1+1 - while not fertig: - fertig = True - # Suche bis zum zweiten Vokal - for vpos2 in range(stpos,len(wort)): - if wort[vpos2] in VOWELS: - break - else: - # Kein zweiter Vokal enthalten! - return [] - # wort[vpos2] ist der zweite Vokal - if vpos2==2 and wort[1] not in VOWELS: - # Nach Einkonsonantenregel bleibt als erste Silbe nur ein einzelner Buchstabe, - # z.B. o-ber. 
Das wollen wir nicht - stpos = vpos2+1 - fertig = False - if vpos2==vpos1+1: - # a sequence of two vowels, like German "ei" or "au", or English "ou" or "oi" - if wort[vpos1:vpos2+1] in [u'äu', u'au', u'eu', u'ei', u'ie', u'ee']: - # Treat the sequence as if it was one vowel! - stpos = vpos2+1 - fertig = False - else: - return [HyphenationPoint(vpos2,quality,0,self.shy,0,u"")] + self.schiebe(vpos2,self.dudentrennung(wort[vpos2:],quality)) - if wort[vpos2-3:vpos2] in [u'sch',]: - return [HyphenationPoint(vpos2-3,quality,0,self.shy,0,u"")] + self.schiebe(vpos2-3,self.dudentrennung(wort[vpos2-3:],quality)) - elif ALTE_REGELN and wort[vpos2-2:vpos2] in [u'st']: - return [HyphenationPoint(vpos2-2,quality,0,self.shy,0,u"")] + self.schiebe(vpos2-2,self.dudentrennung(wort[vpos2-2:],quality)) - elif ALTE_REGELN and wort[vpos2-2:vpos2] in [u'ck']: - return [HyphenationPoint(vpos2-1,quality,1,u"k"+self.shy,0,u"")] + self.schiebe(vpos2-1,self.dudentrennung(wort[vpos2-1:],quality)) - elif wort[vpos2-2:vpos2] in [u'ch',u'ck', u'ph']: - return [HyphenationPoint(vpos2-2,quality,0,self.shy,0,u"")] + self.schiebe(vpos2-2,self.dudentrennung(wort[vpos2-2:],quality)) - elif wort[vpos2-1] in VOWELS: - return [HyphenationPoint(vpos2 ,quality,0,self.shy,0,u"")] + self.schiebe(vpos2, self.dudentrennung(wort[vpos2:],quality)) - else: - return [HyphenationPoint(vpos2-1,quality,0,self.shy,0,u"")] + self.schiebe(vpos2-1,self.dudentrennung(wort[vpos2-1:],quality)) - - def zerlegeWort(self,zusgWort,maxLevel=20): - - #Wort erstmal normalisieren - assert isinstance(zusgWort,unicode) - zusgWort = zusgWort.lower().replace(u'Ä',u'ä').replace(u'Ö',u'ö').replace(u'Ü',u'ü') - lenword = len(zusgWort) - #print zusgWort - loesungen = [] - - L = self._zerlegeWort(zusgWort) - # Trennung für Wortstämme mit Endungen berichtigen - for W in L: - # Eine mögliche Lösung. 
Von dieser die einzelnen Wörter betrachten - Wneu = [] - offset = 0 - ok = True - #log.debug ("Versuche %r", W) - sr = "" - for i,w in enumerate(W): - if not ok: break - offset += len(w.prefix_chars) - if i>0: - # @TODO: Hier darf nicht fest shy stehen, da - # das letzte Wort mit "-" geendet haben könnte - lastWordSuffixChars = W[i-1].suffix_chars - if lastWordSuffixChars and lastWordSuffixChars[len(lastWordSuffixChars)-1][-1:] in [u"-",self.shy]: - Wneu.append(HyphenationPoint(offset,self.qHaupt,0,"",0,sr)) - else: - Wneu.append(HyphenationPoint(offset,self.qHaupt,0,self.shy,0,sr)) - if w.konsonantenverkuerzung_3_2: - sr = w.root.strval[-1] - else: - sr = u"" - - if w.prefix: - for f in w.prefix: - Wneu += self.schiebe(offset,self.dudentrennung(f.strval,self.qVorsilbe)) - offset += len(f.strval) - Wneu.append(HyphenationPoint(offset,7,0,self.shy,0,u"")) - # @TODO Qualität 7 ist hier fest eingebrannt - for p in w.root.props: - if isinstance(p,TRENNUNG) or isinstance(p,KEEP_TOGETHER): - st = p.args - break - else: - st = self.dudentrennung(w.root.strval,self.qSchlecht) - if len(st): - Wneu += self.schiebe(offset,st) - st,stLast = st[:-1],st[-1] - p = stLast.indx - offset += p - en = w.root.strval[p:]+(u"".join([s.strval for s in w.suffix])) - else: - en = w.root.strval+(u"".join([s.strval for s in w.suffix])) - if w.suffix: - ent = self.dudentrennung(en,self.qNeben) - #print "en=",en,"ent=",ent - Wneu += self.schiebe(offset,ent) - # Prüfen, ob dieses Wort als letztes stehen muss - # - #for pf in w.prefix + [w.root] + w.suffix: - # if i>0 and pf.props.get(NOT_AFTER_WORD) and str(W[i-1].root) in pf.props.get(NOT_AFTER_WORD): - # if VERBOSE: print "'%s' nicht erlaubt nach '%s'" % (pf,W[i-1].root) - # ok = False - # break - # if pf.props.get(ONLY_LAST_WORD) and i0: - # if VERBOSE: print "'%s' nur als erstes Wort erlaubt!" % pf - # ok = False - # break - #else: - # # letztes Wort - # for pf in w.prefix + [w.root] + w.suffix: - # #print "letztes Wort, Bestandteil",pf, pf.props - # if pf.props.get(NOT_LAST_WORD): - # if VERBOSE: print "'%s' nicht als letztes Wort erlaubt!" % pf - # ok = False - # break - offset += len(en) - offset += len(w.suffix_chars) - if ok and (Wneu not in loesungen): - log.debug ("Wneu=%r", Wneu) - loesungen.append(Wneu) - - return loesungen - - def hyph(self,word): - log.debug ("DCW hyphenate %r", word) - assert isinstance(word, unicode) - loesungen = self.zerlegeWort(word) - if len(loesungen) > 1: - # Trennung ist nicht eindeutig, z.B. bei WachsTube oder WachStube. - #hword.info = ("AMBIGUOUS", loesungen) - # nimm nur solche Trennstellen, die in allen Lösungen vorkommen, - # und für die Qualität nimm die schlechteste. - loesung = [] - loesung0, andere = loesungen[0], loesungen[1:] - for i,hp in enumerate(loesung0): - q = hp.quality - for a in andere: - if q: - for hp1 in a: - if hp1.indx==hp.indx \ - and hp1.nl==hp.nl and hp1.sl==hp.sl \ - and hp1.nr==hp.nr and hp1.sr==hp.sr: - q = min(q,hp1.quality) - break - else: - # Trennstelle nicht in der anderen Lösung enthalten - q = 0 - if q: - loesung.append(HyphenationPoint(hp.indx,q,hp.nl,hp.sl,hp.nr,hp.sr)) - if loesung: - # Es gibt mindestens eine Trennstelle, die bei allen Varianten - # enthalten ist, z.b. Wachstu-be. - pass - # hword.info = ("HYPHEN_OK", loesung) - else: - # Es gibt keine Trennstelle. 
- pass - elif len(loesungen) == 1: - # Trennung ist eindeutig - loesung = loesungen[0] - #hword.info = ("HYPHEN_OK", loesung) - if not loesung: - pass # hword.info = ("NOT_HYPHENATABLE", aWord) - else: - # Das Wort ist uns unbekannt. - return None - return HyphenatedWord(word, loesung) - - def i_hyphenate(self,aWord): - return ExplicitHyphenator.i_hyphenate_derived(self, aWord) - -if __name__=="__main__": - h = DCWHyphenator("DE",5) - h.test(outfname="DCWLearn.html") diff --git a/dist-packages/wordaxe/wordaxe/ExplicitHyphenator.py b/dist-packages/wordaxe/wordaxe/ExplicitHyphenator.py deleted file mode 100755 index 778a693b2..000000000 --- a/dist-packages/wordaxe/wordaxe/ExplicitHyphenator.py +++ /dev/null @@ -1,199 +0,0 @@ -#!/usr/bin/env python -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' - -import codecs - -from wordaxe.hyphen import SHY, HyphenatedWord -from wordaxe.BaseHyphenator import BaseHyphenator -from wordaxe.hyphrules import decodeTrennung - -class ExplicitHyphenator(BaseHyphenator): - """ - Allow to explicitly specify how a word should be hyphenated. - This is a slight improvement compared to BaseHyphenator. - - Usage: - - hyphenator = ExplicitHyphenator("DE") - # Add explicit hyphenation for a single word. - hyphenator.add_entry(u"analphabet", u"an8alpha5bet") - # Add several entries - hyphenator.add_entries({u"urinstinkt": u"ur8instinkt", - u"urinstinkte": u"ur8instinkte", - u"urinstinkten": u"ur8instinkt3en", - }) - - The last entry is probably not correctly hyphenated - according to the german hyphenation rules, but you don't - want to read "urinstink" in a text... - - The add_entry/add_entries usually expect unicode strings. - Bytes strings require the encoding argument to be supplied. - hyphenator.add-entries ("bräutigam", "bräu5ti5gam", encoding="iso-8859"). - - Instead of using numbers for defining the quality of a hyphenation - point, you may use the "~" (tilde) character, corresponding to - a medium quality hyphenation point: "bräu~ti~gam". - """ - - def __init__ (self, - language="DE", - minWordLength=4, - qHaupt=8, - qNeben=5, - qVorsilbe=5, - qSchlecht=3, - hyphenDir=None, - **options - ): - BaseHyphenator.__init__(self,language=language,minWordLength=minWordLength,**options) - - # Qualitäten für verschiedene Trennstellen - self.qHaupt=qHaupt - self.qNeben=qNeben - self.qVorsilbe=qVorsilbe - self.qSchlecht=qSchlecht - - # Stammdaten initialisieren - self.sonderfaelle = [] - - def add_entry(self, word, trennung, encoding=unicode): - if not isinstance(word, unicode): - word = unicode(word, encoding) - if not isinstance(trennung, unicode): - trennung = unicode(trennung, encoding) - # Ignore Case @TODO Umlaute usw.! - word = word.lower() - trennung = trennung.replace(u"~", u"5") - lenword = len(word) - for (lae, L) in self.sonderfaelle: - if lae == lenword: - L[word] = trennung - break - else: - self.sonderfaelle.append((lenword,{word: trennung})) - - def add_entries(self, mapping, encoding=unicode): - for word, trennung in mapping.items(): - self.add_entry(word, trennung, encoding) - - def add_entries_from_file(self, filename, encoding=None): - """ - Add entries from a text file (interpreting the file - using the given encoding). 
If encoding is not given - or None, try to extract the encoding from a line - near the start of the file like - # -*- coding: iso-8859-1 -*- - """ - if encoding is None: - import re - frag = open(filename,"rt").read(1000) - m = re.search(r"-\*- coding: ([^ ]+) -\*-", frag) - if m is not None: - encoding = m.group(1) - else: - raise ValueError("Encoding not specified and not found in file") - fh = codecs.open(filename, "rt", encoding) - for line in fh: - line = line.strip() - if not line or line.startswith("#"): - continue - word, trennung = line.split() - self.add_entry(word, trennung) - fh.close() - - - def hyph(self, word): - #print "ExplicitHyphenator hyph", word - lenword = len(word) - for (lae, L) in self.sonderfaelle: - if lae == lenword: - trennung = L.get(word.lower(), None) - if trennung is not None: - hword = HyphenatedWord(word, decodeTrennung(trennung)) - return hword - break - # Wort nicht gefunden - return None - - def i_hyphenate(self, aWord): - assert isinstance(aWord, unicode) - return self.stripper.apply_stripped(ExplicitHyphenator.hyph, self, aWord) - - def i_hyphenate_derived(self,aWord): - """ - You can use this method in classes derived from ExplicitHyphenator. - It will first split the word using BaseHyphenator, - then for each "subword" it will call ExplicitHyphenator, - and only call the derived classes hyph method for the still - unknown subwords. - - TODO: The implementation does not match the docstring - test: "hohenlimburg.de", "hohenlimburg.de)" - """ - #print "ExplicitHyphenator.i_hyphenate_derived", aWord - assert isinstance(aWord, unicode) - - # Helper function - - sub_hwords = [] - hword = BaseHyphenator.i_hyphenate(self,aWord) - #print "BaseHyphenator.i_hyphenate returned %r" % hword - if hword is None: - hword = HyphenatedWord(aWord,hyphenations=[]) - base_hyph_points = hword.hyphenations - last_indx = 0 - nr = 0 - for hpnum, hp in enumerate(base_hyph_points): - if isinstance(hp, int): - hp = HyphenationPoint(hp, quality=5, sl=SHY) - subword = hword[last_indx+nr:hp.indx] - # handle subword - if SHY in subword: - sub_hword = self.stripper.apply_stripped(BaseHyphenator.hyph, self, subword) - else: - sub_hword = self.stripper.apply_stripped(ExplicitHyphenator.hyph, self, subword) - if sub_hword is None: - sub_hword = self.stripper.apply_stripped(self.hyph, self, subword) - if sub_hword is None: - sub_hword = HyphenatedWord(subword, hyphenations=[]) - sub_hwords.append(sub_hword) - # end handle subword - last_indx = hp.indx - nr = hp.nr - # Now the last subword - subword = hword[last_indx:] - # handle subword - if SHY in subword: - sub_hword = self.stripper.apply_stripped(BaseHyphenator.hyph, self, subword) - else: - sub_hword = self.stripper.apply_stripped(ExplicitHyphenator.hyph, self, subword) - if sub_hword is None: - sub_hword = self.stripper.apply_stripped(self.hyph, self, subword) - if sub_hword is None: - sub_hword = HyphenatedWord(subword, hyphenations=[]) - sub_hwords.append(sub_hword) - #end handle subword - if len(sub_hwords) > 1: - return HyphenatedWord.join(sub_hwords) - else: - return sub_hwords[0] # Kann auch None sein. 
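The ExplicitHyphenator docstring above describes the explicit-entry API (add_entry/add_entries with quality digits, "~" as a medium-quality point). A minimal usage sketch follows; it is Python 2 like the removed sources, assumes the wordaxe package being deleted here is still importable, and assumes that hyphenate() is the public entry point wrapping i_hyphenate, as the BaseHyphenator self-test above suggests. The commented results are illustrative, not verified output.

# -*- coding: iso-8859-1 -*-
# Sketch only: exercises the add_entry / add_entries / hyphenate API
# described in the ExplicitHyphenator docstring above.
from wordaxe.ExplicitHyphenator import ExplicitHyphenator

h = ExplicitHyphenator("DE", 5)
# Digits in the pattern give the quality of each hyphenation point
# (8 = main, 5 = secondary, 3 = poor); "~" is replaced by quality 5.
h.add_entry(u"analphabet", u"an8alpha5bet")
h.add_entries({u"urinstinkt": u"ur8instinkt",
               u"urinstinkte": u"ur8instinkte"})

hword = h.hyphenate(u"Analphabet")   # HyphenatedWord or None for unknown words
if hword is not None:
    print "%s -- %s" % (u"Analphabet", hword.showHyphens())  # e.g. An-alpha-bet
else:
    print "unknown word"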
- - -if __name__=="__main__": - h = ExplicitHyphenator("DE",5) - h.add_entry("Bräutigam", "Bräu5ti5gam", "iso-8859-1") - h.add_entries({u"Urinstinkt": u"Ur8instinkt", - u"Urinstinkte": u"Ur8instinkte", - u"Urinstinkten": u"Ur8instinkt3en", - } - ) - h.test(outfname="ExplicitLearn.html") diff --git a/dist-packages/wordaxe/wordaxe/PyHnjHyphenator.py b/dist-packages/wordaxe/wordaxe/PyHnjHyphenator.py deleted file mode 100755 index 03bedc83a..000000000 --- a/dist-packages/wordaxe/wordaxe/PyHnjHyphenator.py +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env python -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' - -import os,sys -import copy - -from wordaxe.hyphen import * -from xml.sax.saxutils import escape,quoteattr - -from wordaxe.ExplicitHyphenator import ExplicitHyphenator - -VERBOSE = False - -class PyHnjHyphenator(ExplicitHyphenator): - """ - Hyphenation using pyHnj (Knuth's algorithm). - The pyHnj/libhnj code does not work if german words contain umlauts. - As a work-around you can use a pure python version that does - not use pyHnj/libhnj and should give the same results. - """ - - def __init__ (self, - language="EN", - minWordLength=4, - quality=8, - hyphenDir=None, - purePython=False, - **options - ): - """ Note: - The purePython version does NOT use Knuth's algorithm, - but a more simple (and slower) algorithm. - """ - ExplicitHyphenator.__init__(self,language=language,minWordLength=minWordLength, **options) - if hyphenDir is None: - hyphenDir = os.path.join(os.path.split(__file__)[0], "dict") - self.purePython = purePython - fname = os.path.join(hyphenDir, "hyph_%s.dic" % language) - # first line is set of characters, all other lines are patterns - if self.purePython: - # Note: we do not use a TRIE, we just store the patterns in a dict string:codes - lines = open(fname).read().splitlines() - self.characters = lines.pop(0) - self.patterns = {} - for pattern in lines: - pat = "" - codes = "" - digit = "0" - for ch in pattern: - if ch>='0' and ch<='9': - digit = ch - else: - codes = codes+digit - pat = pat+ch - digit = "0" - codes = codes+digit - self.patterns[pat.decode("iso-8859-1")] = codes - else: - import pyHnj - self.hnj = pyHnj.Hyphen(fname) - self.quality = quality - - # Hilfsfunktion - def schiebe(self,offset,L): - return [HyphenationPoint(h.indx+offset,h.quality,h.nl,h.sl,h.nr,h.sr) for h in L] - - def zerlegeWort(self,zusgWort): - if self.purePython: - word = "." + zusgWort.lower() + "." 
- # Alle Längen durchgehen (minimum: 2) - codes = ["0"]*len(word) - for patlen in range(2,len(word)): - #print "patlen %d" % patlen - for startindx in range(len(word)-patlen): - #print "startindx %d" % startindx - try: - patcode = self.patterns[word[startindx:startindx+patlen]] - #print "testpat=%s patcode=%s" % (word[startindx:startindx+patlen],patcode) - for i,digit in enumerate(patcode): - if digit > codes[i+startindx]: - codes[i+startindx] = digit - except KeyError: - pass - codes = codes[2:-1] - else: - codes = self.hnj.getCodes(zusgWort.lower()) - hyphPoints = [] - for i, code in enumerate(codes): - # wir trennen nicht das erste oder letzte Zeichen ab - if i==0 or i==len(codes)-1: - continue - if (ord(code)-ord('0')) % 2: - hyphPoints.append(HyphenationPoint(i+1,self.quality,0,self.shy,0,"")) - return hyphPoints - - def hyph(self,aWord): - assert isinstance(aWord, unicode) - hword = HyphenatedWord(aWord, hyphenations=self.zerlegeWort(aWord)) - # None (unknown) kann hier nicht vorkommen, da der - # Algorithmus musterbasiert funktioniert und die Wörter - # sowieso nicht "kennt" oder "nicht kennt". - return hword - - def i_hyphenate(self, aWord): - return ExplicitHyphenator.i_hyphenate_derived(self, aWord) - -if __name__=="__main__": - h = PyHnjHyphenator("de_DE",5, purePython=True) - h.test(outfname="PyHnjLearn.html") - diff --git a/dist-packages/wordaxe/wordaxe/__init__.py b/dist-packages/wordaxe/wordaxe/__init__.py deleted file mode 100755 index d830bcf1c..000000000 --- a/dist-packages/wordaxe/wordaxe/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2010 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt in the doc directory. -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' -__doc__='Hyphenation support using different algorithms' - -__all__ = ["BaseHyphenator", "DCWHyphenator", "PyHnjHyphenator", "SHY", "HyphenationPoint", "HyphenatedWord"] - -from wordaxe.hyphen import SHY, HyphenationPoint, HyphenatedWord, Hyphenator, Cached - -# This is meant as a registry for Hyphenators. -# if you want to use a Hyphenator A for language B, -# just set hyphRegistry[A]=B -hyphRegistry = {} - -version = "wordaxe 1.0.1" diff --git a/dist-packages/wordaxe/wordaxe/dict/DEhyph.py b/dist-packages/wordaxe/wordaxe/dict/DEhyph.py deleted file mode 100755 index 43e2f785f..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/DEhyph.py +++ /dev/null @@ -1,2068 +0,0 @@ -# -*- coding: UTF-8 -*- -# -# -# Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) -# This software is dual-licenced under the Apache 2.0 and the -# 2-clauses BSD license. 
For details, see license.txt -# -# - -# Sonderfälle mit Angabe der Trennung -special_words = u""" -altbauerhaltung,TRENNUNG:alt8bau8erhal5tung -analphabet,TRENNUNG:an8alpha5bet -analphabeten,TRENNUNG:an8alpha5be5ten -analphabetismus,TRENNUNG:an8alpha5be5tis4mus -sprecherziehung,TRENNUNG:sprech8erzie4hung -urinstinkt,TRENNUNG:ur8instinkt -urinstinkte,TRENNUNG:ur8instinkte -urinstinkten,TRENNUNG:ur8instinkten -wordaxe,TRENNUNG:word8axe -""" - -# Wort,property,property -roots = u""" -aas -abend -aber -achs -acht -ächt,NEED_SUFFIX -add,FREMDWORT,NEED_SUFFIX -addit,FREMDWORT,NEED_SUFFIX -adel -adler -adress -affe -affin,FREMDWORT -agent -ahn -aktion -aktiv,FREMDWORT -aktual,FREMDWORT,NEED_SUFFIX -aktuell -akut -akzept,FREMDWORT -algorithmen,FREMDWORT -algorithmik,FREMDWORT -algorithmus,FREMDWORT -all -allerg,FREMDWORT -allerlei -alliterat,FREMDWORT,NEED_SUFFIX -alm -almosen -also -alt -ameise -amerika -amput,FREMDWORT,NEED_SUFFIX -amt -analog,FREMDWORT -analys,FREMDWORT -ander -angst -annot,FREMDWORT,NEED_SUFFIX -antwort -apfel -arbeit -arbeit -archiv,FREMDWORT -argument,FREMDWORT -arm -arsch -art -artikel -artikul,FREMDWORT,NEED_SUFFIX -arzt -assembl,FREMDWORT -ast -atem -atmosphär,FREMDWORT,TRENNUNG:at2mo5sphär -attribut,FREMDWORT -auch -audio -auf -aug -auge -aus -auto -automat,FREMDWORT -autor,FREMDWORT -außen -außer -außerdem -ähn,NEED_SUFFIX -ält,NEED_SUFFIX -änder,NEED_SUFFIX -ängst,NEED_SUFFIX -ärzt -bach -bäch,NEED_SUFFIX -back -bäck,NEED_SUFFIX -bad -bäd,NEED_SUFFIX -bahn -bald -ball -bälle -banane -band -bänd,NEED_SUFFIX -bank -bänk,FREMDWORT -bann -bar -bär -barriere -barsch -bart -bärt,NEED_SUFFIX -bas,FREMDWORT,NEED_SUFFIX -bat -bau -bauch -bäuch,NEED_SUFFIX -baum -bäume -bayer -bayr -becher -bei -beim,NO_SUFFIX -bein -berg -besser -best -bet -bett -beut -beutel -biblio,FREMDWORT -bier -biet -bild -bind -binnen -birg -birn -bis -biss -bist,NO_PREFIX,NO_SUFFIX -bistum -bit -bitt -blatt -blätt,NEED_SUFFIX -blau -blech -bleib -blend -blick -blitz -block -blume -blüm,NEED_SUFFIX -bluse -blut -boden -boot -bote -box -bracht -brand -bränd -brannt -brat -brauch -bräuch,NEED_SUFFIX -braun -bräun,NEED_SUFFIX -brech -brei -breit -brems -brenn -brett -brief -bring -broch,NEED_SUFFIX -brot -bruch -brust -brüst -brut -brutal -brutto -buch -buffer,ENGLISCH -bund -büro -burg -bürg -bus -buß -butter -button,ENGLISCH -byte -cache,KEEP_TOGETHER -chance -chef -chines,NEED_SUFFIX -chiffr,NEED_SUFFIX -chip -cholesterin -client,ENGLISCH -code -codier,FREMDWORT -cola -comput,ENGLISCH -controll,ENGLISCH -copy,ENGLISCH -count,ENGLISCH,KEEP_TOGETHER -da,NO_SUFFIX -dach -daddel -dampf -dank -dann -dar -darf -darm -das,NO_SUFFIX -dass,NO_SUFFIX -darm -därm -datei -daten -datum -dauer -daumen -deck -decod,ENGLISCH -defin,FREMDWORT -definit,FREMDWORT -dehn -dein -delikt -dem,NO_PREFIX,NO_SUFFIX,ONLY_FIRST_WORD -dem,NO_PREFIX,NO_SUFFIX,ONLY_LAST_WORD -den,NO_PREFIX,NO_SUFFIX,ONLY_FIRST_WORD -denen,NO_PREFIX,NO_SUFFIX,ONLY_FIRST_WORD -denk -denn,NO_SUFFIX -dennoch -der,NO_PREFIX,NO_SUFFIX,ONLY_FIRST_WORD -deren -derer -derzeit -des,NO_PREFIX,NO_SUFFIX -design -dessen -desto,NO_SUFFIX -detail,TRENNUNG:de5tail -deut -deutsch -dialog -dick -dich,NO_SUFFIX -dicht -die -dieb -dien,NEED_SUFFIX -dienstag -dies -diesel -digital,FREMDWORT -ding -direkt,FREMDWORT -diskette -divid,FREMDWORT -doch -dokument,FREMDWORT -dom -donau -donner -doppel -doppl,NEED_SUFFIX -dorf -dort -dose -dos,FREMDWORT,NEED_SUFFIX -down,ENGLISCH -draht -drang -dräng,NEED_SUFFIX -dreh -drei -dreist -dring -dritt -druck -drück -dual -duell,FREMDWORT 
-dukt,FREMDWORT,NEED_PREFIX -dumm -dümmer -dünn -dukt,FREMDWORT -durch -durf,NEED_SUFFIX -dürf,NEED_SUFFIX -duz,FREMDWORT -dynam,FREMDWORT -dynamo -eben -echse -echt -eck -edel -edit,FREMDWORT -edikt,FREMDWORT -effekt,FREMDWORT -ehe,NOT_AFTER_WORD:best -eher -ei,NOT_BEFORE:n -eigen -eign,NEED_SUFFIX -eignis -eimer -ein -einig -einheit -einzel -einzig -eis -eisen -eisern -elefant -elektro -elektron -element,FREMDWORT -elter,NEED_SUFFIX -emblem -empfahl -empfand -empfand -empfang -empfehl,NEED_SUFFIX -empfind -empfing -empfohl -encod,ENGLISCH - -end,NOT_LAST_WORD -# damit z.B. "lachende" nicht als zwei Wörter gesehen wird -end,NEED_SUFFIX,SINGLE_WORD -# z.B. für "beenden" -# Was macht man mit "wortenden"??? -# Hier kann nur eine Unterscheidung zwischen Substantiven, Adjektiven, Verben etc. helfen! - -energ,FREMDWORT -eng -englisch -erd -erst -ess -etwa -eventu,FREMDWORT,NEED_SUFFIX -exakt,TRENNUNG:ex4akt -exempel,TRENNUNG:ex2em4pel -exemplar,TRENNUNG:ex2em4plar -exist,FREMDWORT,KEEP_TOGETHER -exot -extern -extra,TRENNUNG:ex4tra -extrem,FREMDWORT -fach -fähig -fahl -fahr -fähr -fakt -falen -# von Westfalen -fälisch -fall -fäll,NEED_SUFFIX -falsch -fälsch,NEED_SUFFIX -falt -fält,NEED_SUFFIX -familie -fand -fang -farb -färb,NEED_SUFFIX -farn -fass -fast -fecht -feder -feedback -fehl -fekt,FREMDWORT,NEED_PREFIX -feier -feld -fell -fenster -fer,FREMDWORT,NEED_PREFIX -ferenz,FREMDWORT -fern -fertig -fess,FREMDWORT -fessel -fest -fete -fetisch -fett -feucht -fick -figur,FREMDWORT -film -filter,ENGLISCH -find,NEED_SUFFIX -finger -firewall,TRENNUNG:fire8wall -firm,FREMDWORT -fisch -fix,FREMDWORT -fiz,FREMDWORT,NEED_SUFFIX -flach -fläche -flamm -flasche -flat,FREMDWORT -# für Inflation -fleisch -flex,FREMDWORT -flexibel,FREMDWORT -flexible,FREMDWORT -flieg,NEED_SUFFIX -flug -flüg -flügel -fluss -flüss,NEED_SUFFIX -fluß -floss -folg -fon -fön -forder -förder -forge,ENGLISCH -form,FREMDWORT -format,FREMDWORT -forsch -fort -forward,ENGLISCH -frag,NEED_SUFFIX -frau -frei -fremd -freud -freund -fries -fried -frisch -frist -froh -fröhlich -fromm -frosch -frösch,NEED_SUFFIX -früh -frust -fug -füg -fühl -fuhr -führ -füll -fund -fünd,NEED_SUFFIX -funk -funktion,FREMDWORT -fünf -für -fuß -füß,NEED_SUFFIX -gab,NEED_SUFFIX -gabel -gam,ENGLISCH,NEED_SUFFIX -gang -gäng,NEED_SUFFIX -ganz -gänz,NEED_SUFFIX -gar -garant,FREMDWORT -garn -garnele -geb,NEED_SUFFIX -gegen -gegner -geh,NEED_SUFFIX -geh,NO_SUFFIX,NOT_BEFORE_CHAR:aeiouäüöy -geig -geil -geist -gelb -geld -gelt -gemeinde -genau -gener,FREMDWORT,NEED_SUFFIX -gerade -gern -gesund -gesünd,NEED_SUFFIX -gib -gicht -gilb -gilt -gips -glaub -gleich -gleis -glied -glossar -gold -glück -grab,NEED_SUFFIX -graf -grafik -gramm,FREMDWORT -gras -greif -grenadier -grenz -griff -grimm -grob -gröb,NEED_SUFFIX -groß -größ,NEED_SUFFIX -grün -grund -grupp,FREMDWORT -gruß -grüß,NEED_SUFFIX -gült,NEED_SUFFIX -gunst,NO_SUFFIX -günst,NEED_SUFFIX -guss -gut -güt,NEED_SUFFIX -gieß,NEED_SUFFIX -greif -griff -hab,NEED_SUFFIX -haft -hag,NEED_SUFFIX -hahn -hähne -hai -hand -händ,NEED_SUFFIX -handel -handl,NEED_SUFFIX -händler -hang -häng,NEED_SUFFIX -halb -halde -half -hälft,NEED_SUFFIX -hals -halt -hält -hammer -hamster -hämmer -hans -hardware,TRENNUNG:hard5ware -hark -hart -hass -hast -hat,NO_SUFFIX -hatt,NEED_SUFFIX -hätt,NEED_SUFFIX -hauf -häuf,NEED_SUFFIX -haupt -haus -häus,NEED_SUFFIX -haut -häut,NEED_SUFFIX -head,ENGLISCH -heb -helf -hell -heil -heim -heimat -heiser -heiß -heiter -heiz,NEED_SUFFIX -helf -hemd -hemm -her -heraus -herein -herd -heu -heute -heran -herz -hier -hilf -hin 
-hinder,NEED_SUFFIX -hirn -hirsch -hint,NEED_SUFFIX -histor,FREMDWORT,NEED_SUFFIX -hob -# den ich vom Boden aufhob -hobby -hobel -hoch -höchst -hof -hoff -hoh,NEED_SUFFIX -höh,NEED_SUFFIX -hol -holz -hör -horch -hose -host,ENGLISCH -hüll,NEED_SUFFIX -hund -hundert -hust,NEED_SUFFIX -hut -idee -identifiz,FREMDWORT,NEED_SUFFIX -identifik,FREMDWORT,NEED_SUFFIX -ident,FREMDWORT -igel -ignor,FREMDWORT,NEED_SUFFIX -immer -initi,FREMDWORT,NEED_SUFFIX -input,NO_SUFFIX -industr,FREMDWORT,NEED_SUFFIX -insbesonder -ihn -ihr -illegal -im,NO_PREFIX,NO_SUFFIX,SINGLE_WORD -image,KEEP_TOGETHER -in,NO_PREFIX,NO_SUFFIX,ONLY_FIRST_WORD -ins,NO_PREFIX,NO_SUFFIX,ONLY_FIRST_WORD -indem -indes -index -indiz,FREMDWORT -individu,FREMDWORT -infolge,NO_SUFFIX -inhab,NEED_SUFFIX -inhalt -initial,FREMDWORT -inn -install,FREMDWORT,NEED_SUFFIX,TRENNUNG:in5stall -intend,FREMDWORT,NEED_SUFFIX -interess,FREMDWORT,NEED_SUFFIX -intern -internet -irak -iran -irden -irgend -ist,NO_SUFFIX -jacke -jahr -jähr,NEED_SUFFIX -jed -jedoch -jeglich -jemand -jesus -jet -jetzt -jeweil -job -jojo -joystick,TRENNUNG:joy5stick -jubel -jude -jüd,NEED_SUFFIX -jugend -jung -jüng,NEED_SUFFIX -junkt,FREMDWORT,NEED_SUFFIX -jurist,FREMDWORT -just -jux -kabel -kabin -kachel -kacke -kader -kaff -kaffee -käfig -kahl -kai -kaiman -kajal -kakao -kalender -kaliber,FREMDWORT -kalibr,FREMDWORT,NEED_SUFFIX -kalt -kam -kamera -kamm -kampf -kämpf -kandid,FREMDWORT,NEED_SUFFIX -kann -kanne -kanzel -kanzlei -kanzler -kapitän -kart,FREMDWORT,NEED_SUFFIX -käse -kast,NEED_SUFFIX -käst,NEED_SUFFIX -katze -kau -kauf -käuf,NEED_SUFFIX -kaum -kav,FREMDWORT -kein -keinerlei -kenn,NEED_SUFFIX -kern -kett -ketchup -kick -kiefer -kind -kirch -kist,NEED_SUFFIX -klamm -klar -klär -klass -klausel -klausul,FREMDWORT,NEED_SUFFIX -kleid -klein -klick -klinik -klug -klüg,NEED_SUFFIX -klud,FREMDWORT,NEED_SUFFIX -knabe -knarz -knie -knister -knot,NEED_SUFFIX -knopf -knöpf,NEED_SUFFIX -knüpf -koch,NEED_SUFFIX -kohl,NEED_SUFFIX -koll,FREMDWORT,NEED_PREFIX -kolleg,FREMDWORT -kollekt,FREMDWORT -kombin,FREMDWORT,NEED_SUFFIX -komik -komisch -komm,NEED_SUFFIX -komma -kommata -kommandier,FREMDWORT,NEED_SUFFIX -kommando -kommerz,FREMDWORT -kommun,FREMDWORT -kommuniz,FREMDWORT -kommunikat,FREMDWORT -kompatibel,FREMDWORT -kompatibil,FREMDWORT,NEED_SUFFIX -kompatibl,FREMDWORT,NEED_SUFFIX -kompil,FREMDWORT,NEED_SUFFIX -komplex,FREMDWORT -komplett -komplik,FREMDWORT,NEED_SUFFIX -kompliment -kompliz,FREMDWORT,NEED_SUFFIX -komponent,FREMDWORT -kompromiss,FREMDWORT -kompromiß,FREMDWORT -kompromitt,FREMDWORT -konflikt -konn,NEED_SUFFIX -konsol -konsolid,FREMDWORT -kontakt -kop,FREMDWORT -kopf -korb -korn -korrekt,FREMDWORT -korrig,FREMDWORT,NEED_SUFFIX -kost -kotz -kraft -kragen -kram -kram -krank -kränk -kreide -kreis -kreuch -krumm -kräft,NEED_SUFFIX -krägen -kräm,NEED_SUFFIX -kreat,FREMDWORT -kriech -krieg -krit -kriteri,FREMDWORT -kroch -kuf -kult,FREMDWORT -kunde -kunst -kur -kurs -kurs,FREMDWORT,NEED_SUFFIX -kurv,NEED_SUFFIX -kurz -könn,NEED_SUFFIX -körper -köst,NEED_SUFFIX -künd,NEED_SUFFIX -künst,NEED_SUFFIX -künstl,NEED_SUFFIX -kürz,NEED_SUFFIX -lad -lampe -land -länd,NEED_SUFFIX -lang -läng,NEED_SUFFIX -langsam -lag,NEED_SUFFIX -lapp -laptop -lass -läss,NEED_SUFFIX -last -laterne -latenz,FREMDWORT -latrine -lau -laub -lauer -lauf -läuf,NEED_SUFFIX -laug -laus -laut -läut -laser -leb -ledig -leer -leg -lehn -lehr -leib -leicht -leid -leist -leit -lenk -lern -les,NEED_SUFFIX -letz,NEED_SUFFIX -letzt -leucht -licht -lieb -lied -lief -lieferant,FREMDWORT -lieg -lier,NEED_PREFIX -limburg 
-limit,FREMDWORT -linear,FREMDWORT -ling -link -lischt,NEED_PREFIX -list -list,ENGLISCH -literat,FREMDWORT,NEED_SUFFIX -lizenz,FREMDWORT -load,ENGLISCH,KEEP_TOGETHER -loch -löcher -lock,NEED_SUFFIX -log -# von lüge -log,FREMDWORT -# von logik -lohn -lokal,FREMDWORT -lor,NEED_SUFFIX -los -lös -lösch -luft -lüg,NEED_SUFFIX -lung,NEED_SUFFIX -lust -mag -mach -macht -mächt,NEED_SUFFIX -mädchen -mager -mahn -mai -makler -makel -mal -manag,ENGLISCH,NEED_SUFFIX -manch -mangel -mängel -mann -männ,NEED_SUFFIX -manu,FREMDWORT,NEED_SUFFIX -mappe -mark,FREMDWORT -maß -mäß -masse -mast -material -materialien -mauer -maus -maxim,FREMDWORT,NEED_SUFFIX -medi,FREMDWORT,NEED_SUFFIX -meer -mehr -meid -meile -mein -meist -meld,NEED_SUFFIX -mensch -menü -merk -mess -messag,ENGLISCH,NEED_SUFFIX -meter,FREMDWORT -methode,TRENNUNG:me5tho5de -miet -micro,TRENNUNG:mi2cro -mikro,TRENNUNG:mi4kro -milch -milli -mind,NEED_SUFFIX -minim,FREMDWORT,NEED_SUFFIX -minister,FREMDWORT -ministeri,FREMDWORT -minus -minute -misch -miss -miss,FREMDWORT,NEED_SUFFIX -mist -mit -mitte -mittel -mittl,NEED_SUFFIX -mobil,FREMDWORT -möbel -möcht,NEED_SUFFIX -mode -modus -moment -momentan -monat -mond -monitor -monoton,FREMDWORT -monogam,FREMDWORT -mög,NEED_SUFFIX -mohr -möhr -montag -moor -most -mord -mörder -motor -mull,NO_SUFFIX -müll -multi,NO_SUFFIX -mund -münd,NEED_SUFFIX -mücke -müh -mumie -mumifik,FREMDWORT -mumifiz,FREMDWORT -muse,FREMDWORT -museum -muskel -muss -müss,NEED_SUFFIX -muster -mut -mutter -mütter -mütze -myst -myth -nach -nacht -nächst -nächt,NEED_SUFFIX -nack -näckig -nackt -nadel -nagel -nägel -nagl,NEED_SUFFIX -nah -näh,NEED_SUFFIX -nahm,NEED_SUFFIX -naiv,TRENNUNG:na5iv -nannt,NEED_PREFIX -nation,FREMDWORT -name -nase -nass -nässe -nativ -natur -natürlich -navig,FREMDWORT -#? 
-neben -need,ENGLISCH -neg,FREMDWORT -negat,FREMDWORT -nehm,NEED_SUFFIX -neig,NEED_SUFFIX -nein -nerv -neu -neun,NEED_SUFFIX -netto -netz -nicht -nick -nie -nieder -niedrig -niemand -nieß -nimm -noch,NO_SUFFIX -nom,FREMDWORT -nomm,NEED_PREFIX -norm,FREMDWORT -not -nötig -nug -nüg,NEED_SUFFIX -nummer,FREMDWORT -nunft -nünft,NEED_SUFFIX -null -nummer,FREMDWORT -nur,NO_SUFFIX -nutte -nutz -nütz -obacht -oben -ober -obgleich,TRENNUNG:ob5gleich -obig -objekt,FREMDWORT -oblieg,NEED_SUFFIX -obrig -obschon,TRENNUNG:ob5schon -obwohl,TRENNUNG:ob5wohl -öde -oder -ofen -offen -öffentlich -offiz,FREMDWORT -öffn,NEED_SUFFIX -ohn -ohr -okkult -öl -oliv -online,KEEP_TOGETHER -oper,FREMDWORT -opfer -optim,FREMDWORT,NEED_SUFFIX -option,FREMDWORT -orakel -ordn,NEED_SUFFIX -organ,FREMDWORT -origin,FREMDWORT -ort -ost -oster -otter -oval -ozon -ozelot -pack -paff,NEED_SUFFIX -page -paket -palm -panda -panne -panzer -papp -par,FREMDWORT -park -partikel -pass,FREMDWORT -pat,NEED_SUFFIX -patent -patz,NEED_SUFFIX -pauk,NEED_SUFFIX -pause -peripher,FREMDWORT,NEED_SUFFIX -perform,ENGLISCH -person,FREMDWORT -persön,NEED_SUFFIX -permanent -pfahl -pfähl -pfand -pfänd -pfaff -pfarr -pfeif -pfeil -pferd -pfingst -pfleg -pflicht -pflug -pflüg -phase -phob,FREMDWORT -phon -phil,FREMDWORT -plan -platt -platz,FREMDWORT -play,ENGLISCH -plem,FREMDWORT,NEED_PREFIX,NEED_SUFFIX -polygon -polyphon -pomp -port,FREMDWORT -position,FREMDWORT -positiv,FREMDWORT -post,FREMDWORT -posting -potent,FREMDWORT -potenz,FREMDWORT -power,ENGLISCH -pracht -prächt -prall -prang -prakt,FREMDWORT,NEED_SUFFIX -prank -praxis,NO_SUFFIX -preis -pret,FREMDWORT,NEED_PREFIX,NEED_SUFFIX -prinz -prinzess -prinzeß -prinzip,FREMDWORT -prior,FREMDWORT -privat,FREMDWORT -prob,FREMDWORT,NEED_SUFFIX -programm,TRENNUNG:pro5gramm -problem -projekt -proto,NEED_SUFFIX -prozess,FREMDWORT -prozeß,FREMDWORT -prüf -publik,FREMDWORT -puff -pulver -pump -punkt -pursuit,ENGLISCH,TRENNUNG:pur5suit -putz -python,TRENNUNG:py-thon -qual -quäl -qualität -quart,FREMDWORT -quell -quer -rad -rad,FREMDWORT -radi,FREMDWORT -rahm -rand -ränd,NEED_SUFFIX -rang -räng,NEED_SUFFIX -rat -rät -ratte -raub -räuber -rauch -räucher -rauf -raum -räum,NEED_SUFFIX -real,FREMDWORT -#realis,FREMDWORT -rech,NEED_SUFFIX -recherch,FREMDWORT,NEED_SUFFIX -red,NEED_SUFFIX -recht -rechn,NEED_SUFFIX -reg,NEED_SUFFIX -regel -regie -regier -region,FREMDWORT -register -registr,FREMDWORT,TRENNUNG:re3gis5tr -regl,NEED_SUFFIX -reib -reif -rein -reich -reis,NO_SUFFIX -reise -reit -rekord -rekt,FREMDWORT -relat,FREMDWORT -rempel -renn -republik -result,FREMDWORT -rest -restaur,FREMDWORT,NEED_SUFFIX -rhein -richt -riech -ries -right,ENGLISCH -ring -risiko -risiken -rock -roll -rot,FREMDWORT -route -routin,FREMDWORT,NEED_SUFFIX -ruch -# ruchlos -ruck -rück -ruf -ruh -ruhr -rühr -rum -rund -runter -saal -sach -säch,NEED_SUFFIX -sag -saison -saite -salvator -salz -sam -samm,NEED_SUFFIX -sampl,ENGLISCH,NEED_SUFFIX -samt -sämtlich -sang -satz -sätz,NEED_SUFFIX -sau -sauber,NEED_SUFFIX -säuber -saug -säug -schab -schach -schacht -schachtel -schad,NEED_SUFFIX -schäd,NEED_SUFFIX -schaf -schaft -schäft -schaff -schal -schalt -schatt,NEED_SUFFIX -schatz -schau -schaufel -scheibe -scheid -schein -scheiß -scheit -scheitel -schenk -scher -scherbe -scherz -scheu -schicht -schick -schieb -schied -schief -schien -schiit -schilf -schiff -schirm -schlacht -schlaf -schläf,NEED_SUFFIX -schlaff -schlag -schlamm -schlämm -schlange -schlank -schlau -schlecht -schleck -schleier -schlemm -schleunig,NEED_SUFFIX -schlief -schließ -schling 
-schlitt -schloss -schluss -schlüss,NEED_SUFFIX -schlüssel -schmal -schmäl,NEED_SUFFIX -schmied -schmieg -schmier -schnee -schneid -schnell -schnief -schnitt -schokolade -schon -schön -schopf -schöpf -geschoren -schorf -schorle -schorn -schrank -schränk,NEED_SUFFIX -schreck -schrei -schreib -schrein -schreit -schrieb -schrift -schritt -schröpf -schrull -schub -schuh -schul -schuld -schulter -schur -schuss -schuß -schutz -schütz -schwab -schwäb,NEED_SUFFIX -schwach -schwall -schwalbe -schwamm -schwämm -schwan -schwanz -schwarm -schwärm -schwarz -schwein -schweiz -schwyz -schwester -schwer -schwierig -schwimm -schwind -schwamm -sechs -see -segment,FREMDWORT -seh -sehr -seicht -seil -sein -seit -sekunde -sekt,FREMDWORT -selb -selekt,FREMDWORT -sell,NEED_PREFIX -selten -semmel -send -senf -sent,FREMDWORT -sequenz,FREMDWORT -# Unterscheidung server/serving <-> servier -serie -serv,ENGLISCH -serv,FREMDWORT -sess,FREMDWORT -sess -setz -sheet,ENGLISCH -sicher -sich,NO_SUFFIX -sicht -sident,FREMDWORT,NEED_PREFIX -sie,NO_PREFIX,NO_SUFFIX,SINGLE_WORD -sieb -sied -sieg -sieh,NEED_SUFFIX -siel -silbe -silber -simul,FREMDWORT -sind -sing -sinn -situ,FREMDWORT,NEED_SUFFIX -sitz -sleep,ENGLISCH -# Sollte das hier stehen bleiben? -so -socke -soft,ENGLISCH -software,TRENNUNG:soft5ware -sohn -solch -soll -solut,FREMDWORT -sommer -sonder -sonn,NEED_SUFFIX -sonst -sorg -sound,TRENNUNG:sound -source,TRENNUNG:source -spalt -span -spann -spar -spargel -spaß -spaten -spät -spatz -spazier -speck -speed,ENGLISCH -speich,NEED_SUFFIX -speis -sperr -spekul,FREMDWORT,NEED_SUFFIX -spezi,FREMDWORT,NEED_SUFFIX -spezif,NEED_SUFFIX -spezifiz,FREMDWORT,NEED_SUFFIX -spezifik,FREMDWORT,NEED_SUFFIX -sphär,FREMDWORT -spiegel -spiel -spir,FREMDWORT,NEED_SUFFIX -spitz -sport -sprach -sprang -sprech -sprich -spring -sproch,NEED_PREFIX,NEED_SUFFIX -spruch -sprüch,NEED_SUFFIX -sprung -sprüng,NEED_SUFFIX -spur -spür -stab -stäb,NEED_SUFFIX -stabil,FREMDWORT -stach -stadt -städt -stahl -stall -stalt,NEED_PREFIX -stamm -stämm,NEED_SUFFIX -stank -stand -ständ,NEED_SUFFIX -standard -stark -stärk,NEED_SUFFIX -starr -start -stat,FREMDWORT -statt -stau -staub -stäub -stauch -staun -stech -steif -steig -steil -stein -steiß -steh -stehl,NEED_SUFFIX -stell -stereo,NO_PREFIX,FREMDWORT -steuer -stich -stief -stiefel -stieg -stiel -stift -stil -still -stimm -stink -stirn -stoch -stock -stopf -stopp -stör -stoß -stöß,NEED_SUFFIX -stotter -straf -straff -strahl -strähn,NEED_SUFFIX -stramm -strang -sträng,NEED_SUFFIX -straße -strauch -strauch -streck -streif -streich -streng -streu -strich -strick -striffen -string,ENGLISCH -strom -strukt,FREMDWORT,NEED_PREFIX -struktur,FREMDWORT -strunk -stube -stumm -stunde -stunk -sturm -sturz -stürz,NEED_SUFFIX -stutz -stütz -style,ENGLISCH -such -suffiz,FREMDWORT -suffix,FREMDWORT -sult,FREMDWORT -summ,FREMDWORT -support -surf,ENGLISCH -symbol -symptom,FREMDWORT -synchron,FREMDWORT -system,FREMDWORT -tabelle -tag -täg,NEED_SUFFIX -tal -tan -tank -tasche -tasse -tast,NEED_SUFFIX -tastatur -tat -tau -taug,NEED_SUFFIX -tauch,NEED_SUFFIX -taumel -tausch -täusch,NEED_SUFFIX -tausend -team,KEEP_TOGETHER -techn,FREMDWORT,NEED_SUFFIX -teil -teller -temper,FREMDWORT,NEED_SUFFIX -tempo -temporär -termin -terrari,FREMDWORT,NEED_SUFFIX -territori,FREMDWORT -tertiär -test -text,FREMDWORT -textur,ENGLISCH -tief -tier -tiger -tisch -titel -titt,NEED_SUFFIX -toast -tob,NEED_SUFFIX -tochter -töchter -tod -töd,NEED_SUFFIX -toll -tomate -ton -topf -töpf,NEED_SUFFIX -torte -tot -töt,NEED_SUFFIX -tracht -trächt,NEED_SUFFIX 
-track,ENGLISCH -trad,FREMDWORT -traf -trag -träg,NEED_SUFFIX -trah,FREMDWORT,NEED_SUFFIX -train,NEED_SUFFIX -trakt,FREMDWORT -trank -trans,NO_PREFIX,NO_SUFFIX -trat -trau -traum -träum -travers,FREMDWORT -treck -treff -treib -trenn -trepp -tret -treu -tribut,FREMDWORT -trick -trieb -trief -triff -trink -tritt -trock,NEED_SUFFIX -troff -troll,FREMDWORT,NEED_PREFIX -troll -tropf -trotz -trunk -trute -# Schweiz -tschech -tube -tumult -tun -türk -turm -türm -tusch -typ,FREMDWORT -üb,NEED_SUFFIX -über -übrig -uhr -und,NO_PREFIX,NO_SUFFIX -unicode,TRENNUNG:uni8code -univers,FREMDWORT -uns,NO_PREFIX -unser,NO_PREFIX -unten -unter -upgrad,ENGLISCH -updat,ENGLISCH -urin -vater -ventil,FREMDWORT -vers,FREMDWORT -versibel,FREMDWORT -versible,FREMDWORT -vex,FREMDWORT -video -vieh -viel -vier -vir,FREMDWORT,NEED_SUFFIX -vis,FREMDWORT,NEED_SUFFIX -vogel -volk -voll -völl,NEED_SUFFIX -vom,NO_PREFIX,NO_SUFFIX -von,NO_SUFFIX -vor,NO_SUFFIX -wach -wachs -waffe -waffel -wag -wäg,NEED_SUFFIX -wahl -wähl -wahn -wähn,NEED_SUFFIX -wahr -währ -wald -wäld -walt -wand -wandel -wandl,NEED_SUFFIX -wank -wann -war -wär,NEED_SUFFIX -warum -warb -warf -warm -warn -wart -wärt,NEED_SUFFIX -warum,KEEP_TOGETHER,NO_SUFFIX -wasser -wässer -web -website,TRENNUNG:web8site -websites,TRENNUNG:web8sites -wechsel -weder -weg -wehr -weiger,NEED_SUFFIX -weil -wein -weis -weiß -weit -weizen -welch -welle -welt -wem -wen -wend -werb -werd,NEED_SUFFIX -werf -werk -wert -wesen -wessen -west -wett -wichs -wicht -wickel -wickl,NEED_SUFFIX -wider -wie,NO_SUFFIX -wieder -wiederum -wieg -wies -will -wind -window,ENGLISCH -winn -winter -wir,NO_SUFFIX -wirb -wird -wirk -wirst -wirt -wiss,NEED_SUFFIX -wo,NO_SUFFIX -woch -wöchentlich -wog -wohl -wohn -wöhn,NEED_SUFFIX -woll -worb,NEED_SUFFIX,NEED_PREFIX -word,NEED_SUFFIX -worf,NEED_PREFIX -wort -wört,NEED_SUFFIX -wunsch -wünsch -wupper -wurde -wurf -wurst -würd,NEED_SUFFIX -würg,NEED_SUFFIX -wurz -wurzel -würz -zahl -zähl -zahn -zeh -zeich -zeichn,NEED_SUFFIX -zeig -zeil,NEED_SUFFIX -zeit -zentr,FREMDWORT,NEED_SUFFIX -zerr -zeug -zess,FREMDWORT -zieh -ziel -ziffer -zimmer -zivil,FREMDWORT -zog -zög,NEED_SUFFIX -zoo -zoom,ENGLISCH -zu,NO_SUFFIX -zuck,NEED_SUFFIX -zück,NEED_SUFFIX -zug -züg,NEED_SUFFIX -zweck -zwei -zweifel -zweig -zweit -zwick -zwiebel -zwischen -zwölf -""" - -prefixes = u""" -ab -an -anti,FREMDWORT -auf -#aus -be -bei -bi,FREMDWORT -co,FREMDWORT -de,FREMDWORT -dis,FREMDWORT -ein -ent -enz -er -ex,FREMDWORT -extra -fort -ge -hin -her -im,FREMDWORT -in,FREMDWORT -inter -intra,FREMDWORT -# direkt in den Stämmen mit aufgenommen: kom,FREMDWORT -kon,FREMDWORT -kontra -# mit direkt in den Stämmen mit aufgenommen: mit -nach -para,FREMDWORT -per,FREMDWORT -prä,FREMDWORT -pro,FREMDWORT -proto,FREMDWORT -re,FREMDWORT -rück -selbst -sonder -sonders,NO_SUFFIX -sub,FREMDWORT -super,FREMDWORT -tri,FREMDWORT -um -un -ur -ver -vor -zer -zu -zur -""" - -suffixes = u""" -al,FREMDWORT,ONLY_FIRST -ance,ENGLISCH -at,FREMDWORT,ONLY_FIRST -ateur,FREMDWORT,ONLY_FIRST -ator,FREMDWORT,ONLY_FIRST -atur,FREMDWORT,ONLY_FIRST -ation,FREMDWORT,ONLY_FIRST -atisch,FREMDWORT,ONLY_FIRST -ant,FREMDWORT,ONLY_FIRST -bar -chen,ONLY_FIRST -e,NOT_AFTER:e -ell,FREMDWORT,ONLY_FIRST -em -en -end -#,ONLY_LAST_WORD -er -es -eur,FREMDWORT,ONLY_FIRST -heit -keit -ie,FREMDWORT -iell,FREMDWORT -ier,FREMDWORT -ig -ion,FREMDWORT,ONLY_FIRST -ional,FREMDWORT -in,NOT_AFTER:e -ing,ENGLISCH -is,FREMDWORT,ONLY_FIRST -ität,FREMDWORT -y,ENGLISCH -inter -isier,FREMDWORT,ONLY_FIRST -isation,FREMDWORT,ONLY_FIRST -isateur,FREMDWORT,ONLY_FIRST 
-isator,FREMDWORT,ONLY_FIRST -isch -ist,FREMDWORT,ONLY_FIRST -ismen,FREMDWORT,ONLY_FIRST,ONLY_LAST -ismus,FREMDWORT,ONLY_FIRST,ONLY_LAST -ität,FREMDWORT -ik,FREMDWORT,ONLY_FIRST -iv,FREMDWORT,ONLY_FIRST -#l,ONLY_FIRST -ler,ONLY_FIRST,ONLY_AFTER:t -lich -n,NOT_AFTER:e n in -nd,ONLY_FIRST,ONLY_LAST_WORD -nis -o,FREMDWORT,ONLY_FIRST,ONLY_LAST -or,FREMDWORT,ONLY_FIRST -s,ONLY_LAST,NOT_AFTER_CHAR:sßx,NOT_AFTER:t -# die Bedingung NOT_AFTER:t ist neu! -schaft -st -t,NOT_AFTER:ant ell em end heit ie in isch n schaft ung ös -te -ten -ter -tes -thek,FREMDWORT,ONLY_FIRST -tum -tümer -uell,FREMDWORT -um,FREMDWORT -ur,FREMDWORT,ONLY_FIRST -us,FREMDWORT -ung,NOT_AFTER:n -ös,ONLY_FIRST -""" - -prefix_chars = u"""([{\"-/¿¬¡""" - -suffix_chars = u"""}]),";.:-/!=?""" diff --git a/dist-packages/wordaxe/wordaxe/dict/README_hyph_de_DE.txt b/dist-packages/wordaxe/wordaxe/dict/README_hyph_de_DE.txt deleted file mode 100755 index 25d8dbf26..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/README_hyph_de_DE.txt +++ /dev/null @@ -1,30 +0,0 @@ -Hyphenation dictionary ----------------------- - -Language: German (de DE). -Origin: Based on the TeX hyphenation tables - http://www.tug.org/tex-archive/language/hyphenation/dehyphn.tex -License: GNU LGPL license. -Author: conversion author is Marco Huggenberger - revised conversion and extensions: Daniel Naber - http://qa.openoffice.org/issues/show_bug.cgi?id=26355 - -Please note, this dictionary is based on syllable matching patterns -and thus should be suitable under other variations of German - -HYPH de DE hyph_de_DE -HYPH de CH hyph_de_CH - --------------------------------------------------------------------------------- -Trennmuster (hyph_de_DE.dic): --------------------------------------------------------------------------------- - -Die Trennmuster (hyph_de_DE.dic) basieren auf den TeX Trennmustern -"dehyphn.tex", revision level 31. -Lizenz der Trennmuster: GNU LGPL. Die Anpassung der Trennmuster an -den in OpenOffice.org benutzten "ALTLinux LibHnj Hyphenator" wurde -mit dem Script substrings.pl durchgeführt, das unter -http://lingucomponent.openoffice.org/hyphenator.html als Teil der -Datei altlinux_Hyph.zip heruntergeladen werden kann. -Die Original-Trennmuster können hier heruntergeladen werden: -http://www.tug.org/tex-archive/language/hyphenation/dehyphn.tex diff --git a/dist-packages/wordaxe/wordaxe/dict/README_hyph_en_GB.txt b/dist-packages/wordaxe/wordaxe/dict/README_hyph_en_GB.txt deleted file mode 100755 index 5ebca5cfb..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/README_hyph_en_GB.txt +++ /dev/null @@ -1,18 +0,0 @@ -Hyphenation dictionary ----------------------- - -Language: English (en US). -Origin: Based on the TeX hyphenation tables -License: GNU LGPL license. -Author: conversion author is Peter Novodvorsky - -This hyphenation dictionary is based on syllable matching patterns and -should be usable under other variations of English - -HYPH en US hyph_en_US -HYPH en CA hyph_en_CA -HYPH en GB hyph_en_GB -HYPH en AU hyph_en_AU - - - diff --git a/dist-packages/wordaxe/wordaxe/dict/README_hyph_en_US.txt b/dist-packages/wordaxe/wordaxe/dict/README_hyph_en_US.txt deleted file mode 100755 index b6d6b0f5f..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/README_hyph_en_US.txt +++ /dev/null @@ -1,18 +0,0 @@ -Hyphenation dictionary ----------------------- - -Language: English (en US). -Origin: Based on the TeX hyphenation tables -License: GNU LGPL license. 
-Author: conversion author is Peter Novodvorsky - -This hyphenation dictionary is based on syllable matching patterns and -should be usable under other variations of English - -HYPH en US hyph_en_US -HYPH en CA hyph_en_CA -HYPH en GB hyph_en_GB -HYPH en AU hyph_en_AU - - - diff --git a/dist-packages/wordaxe/wordaxe/dict/__init__.py b/dist-packages/wordaxe/wordaxe/dict/__init__.py deleted file mode 100755 index 605501b7f..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: UTF-8 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' -__doc__='Dictionary files' diff --git a/dist-packages/wordaxe/wordaxe/dict/hyph_da.dic b/dist-packages/wordaxe/wordaxe/dict/hyph_da.dic deleted file mode 100755 index d5a527935..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/hyph_da.dic +++ /dev/null @@ -1,1146 +0,0 @@ -abcdefghijklmnopqrstuvwxyzæøå -.ae3 -.an3k -.an1s -.be5la -.be1t -.bi4tr -.der3i -.diagno5 -.her3 -.hoved3 -.ne4t5 -.næv5nt. -.om1 -.ove4 -.po1 -.så3 -.til3 -.yd5r -.ær5i -.øv3r -ab5le -3abst -a3c -ade5la -5adg -a1e -5afg -5a4f1l -af3r -af4ri -5afs -a4gef -a4gi -ag5in -ag5si -3agti -a4gy -a3h -ais5t -a3j -a5ka -a3ke -a5kr -aku5 -a3la -a1le -a1li -al3k -4alkv -a1lo -al5si -a3lu -a1ly -am4pa -3analy -an4k5r -a3nu -3anv -a5o -a5pe -a3pi -a5po -a1ra -ar5af -1arb -a1re -5arg -a1ri -a3ro -a3sa -a3sc -a1si -a3sk -a3so -3a3sp -a3ste -a3sti -a1ta1 -a1te -a1ti -a4t5in -a1to -ato5v -a5tr -a1tu -a3tø -a5va -a1ve -a5væ -a5z -1ba -ba4ti -4bd -1be -be1k -be3ro -be5ru -be1s4 -be1tr -1bi -bi5sk -b1j -4b1n -1bo -bo4gr -bo3ra -bo5re -1br4 -brød3 -4bs -bs5k -b3so -b1st -b5t -3bu -bu4s5tr -b5w -1by -by5s -5bæ -4c1c -1ce -ce5ro -3ch -4ch. -ci4o -ck3 -5cy -3da -4d3af -d5anta -da4s -d1b -d1d4 -1de -de5d -4de4lem -der5eri -de4rig -de5sk -d1f -d1g -d3h -1di -di1e -di5l -d3j -d1k -d1l -d1m -4d1n -3do -4dop -d5ov -d1p -4drett -5d4reve -3drif -3driv -d5ros -d5ru -5drøv -ds5an -ds5in -d1ski -d4sm -dstå4 -d4su -dsu5l -ds5vi -d3ta -d1te -dt5o -d5tr -dt5u -1du -dub5 -d1v -3dy -3dæ -3dø -e5ad -e3af -e5ag -e3ak -e1al -ea4la -e3an -e5ap -e3at -e3bl -ebs3 -e1ci -ed5ar -edde4 -eddel5 -e4do -ed5ra -ed3re -ed3rin -ed4str -e3e -3eff -e3fr -3eft -e3gu -e1h -e3in -ei5s -e3je -e4j5el -e1ka -e3ke -e3kl -4e1ko -e5kr -ek5sa -3eksem -3eksp -e3ku -e1kv -e5ky -e3lad -el3ak -el3ar -e1las -e3le -e4lek -3elem -e1li -5elim -e3lo -el5sa -e5lu -e3ly -e3læ -e3lø -e4mad -em4p5le -em1s -en5ak -e4nan -4enn -e4no -en3so -e5nu -e5ol -e3op -e1or -e3ov -epi3 -e1pr -e3ra -er3af -e4rag -e4rak -e1re -e4ref -er5ege -5erhv -e1ri -e4rib -er1k -ero5d -er5ov -er3s -er5tr -e3rum -er5un -e5ry -e3rø -er5øn -e1ta -e1te -etek4s -e1ti -e3tj -e1to -e3tr -e3tu -e1ty -e5tæ -e5tø -e3um -e3un -3eur -e1va -e3ve -e4v3erf -e1vi -e1væ -e5x -e3æ -e5å -1fa -fa4ce -fags3 -f1b -f1d -1fe -fej4 -fejl1 -f1f -f1g -f1h -1fi -f1k -3fl -1fo -for1en -fo4ri -f1p -f1s4 -4ft -f3ta -f1te -f1ti -f5to -f5tvi -1fu -f1v -3fy -3fæ -3fø -fø4r5en -1ga -g3art -g1b -g1d -1ge -4g5enden -ger3in -ge3s -g3f -g1g -g1h -1gi -gi4b -gi3st -giø4 -5gj -g3k -g1l -g1m -3go -4g5om -g5ov -g3p -1gr -gs1a -gsde4len -g4se -gsha4 -g5sla -gs3or -gs1p -g5s4tide -g4str -gs1v -g4sø -g5så -g3ta -g1te -g1ti -g5to -g3tr -gt4s -g3ud -gun5 -g3v -1gy -g5yd -3gæ -3gø1 -3gå -4ha. 
-heds3 -he5s -4het -hi4e -hi4n5 -hi3s -ho5ko -ho5ve -4h3t -hun4 -hund3 -hvo4 -i1a -i3b -i4ble -i1c -i3dr -ids5k -i1el -i1en -i3er -i3et. -if3r -i3gu -i3h -i5i -i5j -i1ka -i1ke -ik1l -i5ko -ik3re -ik5ri -iks5t -ik4tu -i3ku -ik3v -i3lag -il3eg -il5ej -il5el -i3li -i4l5id -il3k -i1lo -il5u -i3mu -ind3t -5inf -ings1 -in3s -in4sv -inter1 -i3nu -i3od -i3og -i5ok -i3ol -ion4 -ions1 -i5o5r -i3ot -i5pi -i3pli -i5pr -i3re -i3ri -ir5t -i3sc -i3si -i4sm -is3p -i1ster -i3sti -i5sua -i1ta -i1te -i1ti -i3to -i3tr -it5re. -i1tu -i3ty -i5tæ -i1u -i1va -i1ve -i1vi -i3ø -j3ag -jde4rer -jds1 -jek4to -4j5en. -j5k -j3le -j3li -jlmeld5 -jlmel4di -j3r -jre5 -ju3s -5kap -k5au -5kav -k5b -kel5s -ke3sk -ke5st -ke4t5a -k3h -ki3e -ki3st -k1k -k5lak -k1le -3klu -k4ny -5kod -1kon -ko3ra -3kort -ko3v -1kra -5kry -ks3an -k1si -ks3k -ks1p -k3ste -k5stu -ks5v -k1t -k4tar -k4terh -kti4e -kt5re -kt5s -3kur -1kus -3kut -k4vo -k4vu -3kø -3kå -5lab -lad3r -5lagd -la4g3r -5lam -1lat -l1b -ldiagnos5 -l3dr -ld3st -1le. -5led -4lele -le4mo -3len -1ler -1les -4leu -l1f -lfin4 -lfind5 -l1go1 -l3h -li4ga -lingeniø4 -4l5ins -4l3int -li5o -l3j -l1ke -l1ko -l3ky -l1l -l5mu -lo4du -l3op -4l5or -3lov -4l3p -l4ps -l3r -4ls -lses1 -ls5in -l5sj -l1ta -l4taf -l1te -l4t5erf -l3ti -lt3o -l3tr -l3tu -lu5l -l3ve -l3vi -l3væ -5løs -1ma -m1b -m3d -1me -4m5ej -m3f -m1g -m3h -1mi -mi3k -m5ing -mi4o -mi5sty -m3k -m1l -m1m -mmen5 -m1n -3mo -mo4da -4mop -4m5ov -m1pe -m3pi -m3pl -m1po -m3pr -m1r -mse5s -ms5in -m5sk -ms3p -m3ste -ms5v -m3ta -m3te -m3ti -m3tr -m5tå -m1ud -1mul -mu1li -3my -1mæ -3mø -3må -3na -4nak -1nal -n1b -n1c -4nd -n3dr -nd5si -nd5sk -nd5sp -1ne -ne5a -ne4da -nemen4 -nement5e -neo4 -n3erk -n5erl -ne5sl -ne5st -n1f -n4go -4n1h -1ni -4nim -ni5o -ni3st -n1ke -n1ko -n3kr -n3ku -n5kv -n3kæ -4n1l -n1m -n1n -1no -n3ord -n5p -n3r -4ns -n3si -n1sku -ns3po -n1sta -n5sti -n1ta -nta4le -n1te -n1ti -ntiali4 -n3to -n1tr -nt4s5t -nt4su -n3tu -n3ty -n5tæ -4n1v -3ny -n3z -3næ -4n5æb -5nø -o3a -o4as -ob3li -o1c -o4din -od5ri -od5s -od5un -o1e -of5r -o4gek -o4gel -o4g5o -og5re -og5sk -o5h -o5in -oi6s5e -o1j -o3ka -o1ke -o3ku -o3la -o3le -o1li -o1lo -o3lu -o5ly -o5læ -1omr -on3k -ook5 -o3or -o5ov -o3pi -op3l -op3r -op3s -3opta -4or. -or1an -3ordn -ord5s -o3re. -o3reg -o3rek -o3rer -o3re3s -o3ret -o3ri -3orient -or5im -o4r5in -or3k -or5o -or3sl -or3st -or3ø -o3si -o3so -o3t -o1te -o5un -ov4s -o5å -3pa -pa5gh -p5anl -p3d -4pec -3pen -1per -pe1ra -pe5s -pe3u -p3f -4p5h -1pla -p4lan -4ple. -4pler -4ples -p3m -p3n -5pok -4po3re -3pot -4p5p4 -p4ro -1proc -5præ -p3sk -p5so -ps4p -p3st -p1t -1pu -pu5b -p5ule -p5v -5py3 -5pæd -på3 -qu4 -4raf -ra5is -4rarb -r1b -r4d5ar -r3dr -rd4s3 -4reks -1rel -re5la -r5enss -5rese -re5spo -4ress -re3st -re5s4u -5rett -r1f -r1gu -r1h -ri1e -ri5la -4rimo -r4ing -ringse4 -ringso4r -4rinp -4rint -r3ka -r1ke -r1ki -rk3so -r3ku -r5kæ -r1l -rmo4 -r5mu -r1n -ro1b -ro3p -r3or -r3p -r1r -rre5s -rro4n5 -r1sa -r1si -r5skr -r4sk5v -rs4n -r3sp -r5stu -r5su -r3sv -r5tal -r1te -r4teli -r1ti -r3to -r4t5or -rt5rat -rt3re -r5tri -r5tro -rt3s -r5ty -r5tæ -r5tø -r3ud -run4da -5rut -r3va -r1ve -r3vi -r3væ -ry4s -r5æl -4røn -5rør -3råd -r5år -s3af -1sam -sa4ma -s3ap -s1ar -1sat -4s1b -s1d -sdy4 -1se -s4ed -5s4er -se4se -s1f -4s1g4 -4s3h -si4bl -1sig -s5int -5sis -5sit -5siu -s5ju -4sk. -1skab -1ske -s3kl -sk5s4 -5sky -s4kå -s1le -s1li -slo3 -5slu -s5ly -3slå -s1m -s4my -4snin -s4nit -s4næ -so5k -5sol -5som. -3somm -s5oms -5somt -3son -4s1op -sp4 -3spec -4sper -3s4pi -s1pl -3sprog. -s5r4 -s1s4 -4st. -5s4tam -1stan -st5as -3stat -1stav -1ste. 
-1sted -3stel -5stemo -1sten -5step -3ster. -3stes -5stet -5stj -3sto -st5om -1str -5stø -1stå -s1ud -3sul -s3un -3sur -s3ve -3s4y -1sy1s -1sæ -4s5æn -1sø -s5øk -så4r5 -5ta. -1tag -tands3 -4tanv -4tb -tede4l -teds5 -3teg -5tekn -teo1 -5term -te5ro -4t1f -6t3g -t1h -tialis5t -3tid -ti4en -ti3st -ti4ø -4t3k -4t1l -tli4s5 -t1m -t1n -to5ra -to1re -to1ri -tor4m -4t3p -t4ra -4tres -tro5v -1try -3træk. -4ts -t3si -ts4pa -ts5pr -t3st -ts5ul -t4sø -t5så -4t1t -t5uds -5tur -t5ve -t3væ -1typ -u1a -5udl -ud5r -ud3s -3udv -u1e -ue4t5 -uge4ri -ugs3 -u5gu -u3i -u5kl -uk4ta -uk4tr -u1la -u1le -u5ly -u3læ -u5pe -up5l -u5q -u3ra -u3re -u4r3eg -u1rer -u3ro -us5a -u3si -u5ska -u5so -us5v -u1te -u1ti -u1to -ut5r -ut5s4 -5u5v -va5d -3varm -1ved -ve4l5e -ve4reg -ve3s -5vet -v5h -vi4l3in -1vis -v5j -v5k -vl4 -v3le -v5li -vls1 -1vo -4v5om -v5p -v5re -v3st -v5su -v5t -3vu -3værd -1værk -5vå -y3a -y5dr -y3e -y3ke -y5ki -yk3li -y3ko -yk4s5 -y3kv -y5li -y5lo -y5mu -yns5 -y5o -y1pe -y3pi -y3re -yr3ek -y3ri -y3si -y3ti -y5t3r -y5ve -y5væ -zi5o -æb3l -æ3c -æ3e -æg5a -æ4gek -æ4g5r -ægs5 -æ5i -æ5kv -ælle4 -æn1dr -æ5o -æ1re -ær4g5r -æ3ri -ær4ma -ær4mo -ær5s -æ5si -æ3so -æ3ste -æ3ve -øde5 -ø3e -ø1je -ø3ke -ø3le -øms5 -øn3st -øn4t3 -ø1re -ø3ri -ørne3 -ør5o -ø1ve -å1d -å1e -å5h -å3l -å3re -års5t -å5sk -å3t \ No newline at end of file diff --git a/dist-packages/wordaxe/wordaxe/dict/hyph_de.dic b/dist-packages/wordaxe/wordaxe/dict/hyph_de.dic deleted file mode 100755 index 5223e3613..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/hyph_de.dic +++ /dev/null @@ -1,5799 +0,0 @@ -abcdefghijklmnopqrstuvwxyzäöüß -.aa6l -.ab3a4s -.ab3ei -.abi2 -.ab3it -.ab1l -.ab1r -.ab3u -.ad3o4r -.alti6 -.ana3c -.an5alg -.an1e -.ang8s2t1 -.an1s -.ap1p -.ar6sc -.ar6ta -.ar6tei -.as2z -.au2f1 -.au2s3 -.be5erb -.be3na -.ber6t5r -.bie6r5 -.bim6s5t -.brot3 -.bru6s -.ch6 -.che6f5 -.da8c -.da2r -.dar5in -.dar5u -.den6ka -.de5r6en -.des6pe -.de8spo -.de3sz -.dia3s4 -.dien4 -.dy2s1 -.ehren5 -.eine6 -.ei6n5eh -.ei8nen -.ein5sa -.en6der -.en6d5r -.en3k4 -.en8ta8 -.en8tei -.en4t3r -.epo1 -.er6ban -.er6b5ei -.er6bla -.er6d5um -.er3ei -.er5er -.er3in -.er3o4b -.erwi5s -.es1p -.es8t1l -.es8t1n -.ex1a2 -.ex3em -.fal6sc -.fe6st5a -.flu4g3 -.furch8 -.ga6ner -.ge3n4a -.ge5rö -.ges6 -.halb5 -.halbe6 -.hal6br -.haup4 -.hau4t -.heima6 -.he4r3e -.her6za -.he5x -.hin3 -.hir8sc -.ho4c -.hu3sa -.hy5o -.ibe5 -.ima6ge -.in1 -.ini6 -.is5chi -.jagd5 -.kal6k5o -.ka6ph -.ki4e -.kop6f3 -.kraf6 -.kü5ra -.lab6br -.liie6 -.lo6s5k -.lö4s3t -.ma5d -.mi2t1 -.no6th -.no6top -.obe8ri -.ob1l -.obs2 -.ob6st5e -.or3c -.ort6s5e -.ost3a -.oste8r -.pe4re -.pe3ts -.ph6 -.po8str -.rau4m3 -.re5an -.ro8q -.ru5the -.rü5be -.sch8 -.se6e -.se5n6h -.se5ra -.si2e -.spi6ke -.st4 -.sy2n -.tages5 -.tan6kl -.ta8th -.te6e -.te8str -.to6der -.to8nin -.to6we -.um1 -.umpf4 -.un1 -.une6 -.unge5n -.ur1c -.ur5en -.ve6rin -.vora8 -.wah6l5 -.we8ges -.we8s2t -.wes3te -.wo6r -.wor3a -.wun4s -.zi4e -.zuch8 -.ände8re -.öch8 -aa1c -aa2gr -aal5e -aa6r5a -a5arti -aa2s1t -aat2s -6aba -ab3art -1abdr -6abel -aben6dr -ab5erk -ab5err -ab5esse -1abf -1abg -1abhä -ab1ir -1abko -a1bl -ab1la -5ablag -a6blaß -ab4ler -ab1lu -a8blä -5a6blö -abma5c -1abn -ab1ra -ab1re -5a6brec -ab1ro -ab1s -ab8sk -abs2z -3abtei -ab1ur -1abw -5abze -5abzu -ab1än -abäu8 -a4ce. -a5chal -ach5art -ach5au -a1che -a8chent -ach6er. -a6ch5erf -a1chi -ach1l -ach3m -ach5n -a1cho -ach3re -a1chu -ach1w -a1chy -ach5äf -ack1o -acks6t -ack5sta -a1d -8ad. 
-a6d5ac -ad3ant -ad8ar -5addi -a8dein -ade5o8 -adi5en -1adj -1adle -ad1op -a2dre -3adres -adt1 -1adv -a6dä -a1e2d -ae1r -a1er. -1aero -8afa -a3fal -af1an -a5far -a5fat -af1au -a6fentl -a2f1ex -af1fr -af5rau -af1re -1afri -af6tent -af6tra -aft5re -a6f5um -8afä -ag5abe -5a4gent -ag8er -ages5e -1aggr -ag5las -ag1lo -a1gn -ag2ne -1agog -a6g5und -a1ha -a1he -ah5ein -a4h3erh -a1hi -ahl1a -ah1le -ah4m3ar -ahn1a -a5ho -ahra6 -ahr5ab -ah1re -ah8rei -ahren8s -ahre4s3 -ahr8ti -ah1ru -a1hu -ah8ö -ai3d2s -ai1e -aif6 -a3inse -ai4re. -a5isch. -ais8e -a3ismu -ais6n -aiso6 -a1j -1akad -a4kade -a1ke -a1ki -1akko -5akro1 -a5lal -al5ans -3al8arm -al8beb -al8berw -alb5la -3album -al1c -a1le -a6l5e6be -a4l3ein -a8lel -a8lerb -a8lerh -a6lert -5a6l5eth -1algi -al4gli -al3int -al4lab -al8lan -al4l3ar -alle3g -a1lo -a4l5ob -al6schm -al4the -al4t3re -8a1lu -alu5i -a6lur -alu3ta -a1lä -a6mate -8ame. -5a6meise -am6m5ei -am6mum -am2n -ampf3a -am6schw -am2ta -a1mu -a1mä -a3nac -a1nad -anadi5e -an3ako -an3alp -3analy -an3ame -an3ara -a1nas -an5asti -a1nat -anat5s -an8dent -ande4s3 -an1ec -an5eis -an1e2k -4aner. -a6n5erd -a8nerf -a6n5erke -1anfa -5anfert -1anfä -3angab -5angebo -an3gli -ang6lis -an2gn -3angri -ang5t6 -5anhä -ani5g -ani4ka -an5i8on -an1kl -an6kno -an4kro -1anl -anma5c -anmar4 -3annah -anne4s3 -a1no -5a6n1o2d -5a6n3oma -5a6nord -1anr -an1sa -5anschl -an4soz -an1st -5anstal -an1s2z -5antenn -an1th -5anwä -a5ny -an4z3ed -5anzeig -5anzieh -3anzug -an1ä -5anäs -a1nö -anö8d -a1os -a1pa -3apfel -a2ph1t -aph5ä6 -a1pi -8apl -apo1c -apo1s -a6pos2t -a6poth -1appa -ap1pr -a1pr -a5pä -a3pü -a1ra -a4r3af -ar3all -3arbei -2arbt -ar1c -2a1re -ar3ein -ar2gl -2a1ri -ari5es -ar8kers -ar6les -ar4nan -ar5o6ch -ar1o2d -a1rol -ar3ony -a8ror -a3ros -ar5ox -ar6schl -8artei -ar6t5ri -a1ru -a1ry -1arzt -arz1w -ar8zä -arä8m -arö6 -ar5öm -ar1ü2 -a1sa -a6schec -asch5l -asch3m -a6schn -a3s4hi -as1pa -asp5l -as5tev -1asth -a1str -ast3re -8a1ta -ata5c -ata3la -a6tapf -ata5pl -a1te -a6teli -aten5a -ate5ran -6atf -6atg -a1th -at3hal -1athl -2a1ti -5atlant -3atlas -8atmus -6atn -a1to -a6t5ops -ato6ra -a6t5ort. -4a1tr -a6t5ru -at2t1h -at5t6hä -6a1tu -atz1w -a1tä -a1tü -au1a -au6bre -auch3a -au1e -aue4l -5aufent -3auffü -3aufga -1aufn -auf1t -3auftr -1aufw -3auge. -au4kle -aule8s -6aum -au8mar -aum5p -1ausb -3ausd -1ausf -1ausg -au8sin -au4sta -1ausw -1ausz -aut5eng -au1th -1auto -auße8 -a1v -ave5r6a -aver6i -a1w -a6wes -a1x -a2xia -a6xio -a1ya -a1z -azi5er. -8aß -1ba -8ba8del -ba1la -ba1na -ban6k5r -ba5ot -bardi6n -ba1ro -basten6 -bau3sp -2b1b -bb6le -b2bli -2b1c -2b1d -1be -be1a -be8at. -be1ch -8becht -8becke. -be5el -be1en -bee8rei -be5eta -bef2 -8beff -be1g2 -behö8 -bei1s -6b5eisen -bei3tr -b8el -bel8o -belu3t -be3nac -bend6o -be6ners -be6nerw -be4nor -ben4se6 -bens5el -be1nä -be1nü -be1o2 -b8er. -be1ra -be8rac -ber8gab. -ber1r -be1rü -bes8c -bes5erh -bes2p -be5tha -bet5sc -be1un -be1ur -8bex -be6zwec -2b1f8 -2b1g2 -bga2s5 -bge1 -2b1h -bhole6 -1bi -bi1bl -b6ie -bi1el -bi1la -bilä5 -bi1na -bi4nok -bi6stu -bi5tr -bit4t5r -b1j -2b1k2 -bkü6 -bl8 -b6la. -6b1lad -6blag -8blam -1blat -b8latt -3blau. -b6lav -3ble. 
-b1leb -b1led -8b1leg -8b1leh -8bleid -8bleih -6b3lein -ble4m3o -4blich -b4lind -8bling -b2lio -5blit -b4litz -b1loh -8b1los -1blu -5blum -2blun -blut3a -blut5sc -3blä -bläs5c -5blö -3blü -blü8sc -2b1m -2b1n -1bo -bo1ch -bo5d6s -boe5 -8boff -8bonk -bo1ra -b1ort -2b1p2 -b1q -1br -brail6 -brast8 -bre4a -b5red -8bref -8b5riem -b6riga -bro1s -b1rup -b2ruz -8bröh -brös5c -8bs -b1sa -b8sang -b2s1ar -b1sc -bs3erl -bs3erz -b8sof -b1s2p -bst1h -b3stru -b5stä -b6sun -2b1t -b2t1h -1bu -bu1ie -bul6k -b8ure -bu6sin -6b1v -2b1w -1by1 -by6te. -8b1z -1bä -b5ä6s5 -1bü -b6ü5bere -büge6 -bügel5e -bür6sc -1ca -cag6 -ca5la -ca6re -ca5y -c1c -1ce -celi4c -celich5 -ce1ro -c8h -2ch. -1chae -ch1ah -ch3akt -cha6mer -8chanz -5chara -3chari -5chato -6chb -1chef -6chei -ch3eil -ch3eis -6cherkl -6chf -4chh -5chiad -5chias -6chins -8chj -chl6 -5chlor -6ch2m -2chn6 -ch8nie -5cho. -8chob -choi8d -6chp -ch3ren -ch6res -ch3rü -2chs -2cht -cht5ha -cht3hi -5chthon -ch6tin -6chuh -chu4la -6ch3unt -chut6t -8chw -1ci -ci5tr -c2k -2ck. -ck1ei -4ckh -ck3l -ck3n -ck5o8f -ck1r -2cks -ck5stra -ck6s5u -c2l -1c8o -con6ne -8corb -cos6t -c3q -1c6r -8c1t -1cu -1cy -5cä1 -cö5 -1da. -8daas -2dabg -8dabr -6dabt -6dabw -1dac -da2gr -6d5alk -8d5amt -dan6ce. -dani5er -dan8ker -2danl -danla6 -6dans -8danzi -6danzu -d1ap -da2r1a8 -2d1arb -d3arc -dar6men -4d3art -8darz -1dat -8datm -2d1auf -2d1aus -2d1b -2d1c -2d1d -d5de -d3d2h -ddämme8 -1de -2deal -de5an -de3cha -de1e -defe6 -6deff -2d1ehr -5d4eic -de5isc -de8lar -del6s5e -del6spr -de4mag -de8mun -de8nep -dene6r -8denge. -8dengen -de5o6d -2deol -de5ram -8derdb -der5ein -de1ro -der1r -d8ers -der5um -de4s3am -de4s3an -de4sau -de6sil -de4sin -de8sor -de4spr -de2su -8deul -de5us. -2d1f -df2l -2d1g -2d1h -1di -dia5c -di5ara -dice5 -di3chr -di5ena -di1gn -di1la -dil8s -di1na -8dind -6dinf -4d3inh -2d1ins -di5o6d -di3p4t -di8sen -dis1p -di5s8per -di6s5to -dis3tr -di8tan -di8tin -d1j -6dje -2dju -2d1k -2d1l -2d1m -2d1n6 -dni6 -dnje6 -1do -6d5obe -do6berf -6d5ony -do3ran -6dord -2d1org -dor4t3h -6doth -dott8e -2d1p -d5q -dr4 -1drah -8drak -d5rand -6dre. -4drech -d6reck -4d3reg -8d3reic -d5reife -8drem -8d1ren -2drer -8dres. -6d5rh -1dria -d1ric -8drind -droi6 -dro5x -1dru -8drut -drös5c -1drü -drü5b -drü8sc -2ds -d1sa -d6san -dsat6 -d1sc -5d6scha. -5dschik -dse8e -d8serg -8dsl -d1sp -d4spak -ds2po -d8spä -d1st -d1sü -2dt -d1ta -d1te -d1ti -d1to -dt1s6 -d1tu -d5tä -1du -du5als -du1b6 -du1e -duf4t3r -4d3uh -du5ie -8duml -8dumw -2d1und -du8ni -6d5unt -dur2c -durch3 -6durl -6dursa -8durt -dus1t -du8schr -2d1v -2d1w -dwa8l -2d1z -1dä -6däh -8dänd -dä6r -dö8bl -d5öl -dör6fl -dö8sc -d5ö4st -1dü -ea4ben -e1ac -e1ah -e1akt -e1al. -e5alf -e1alg -e5a8lin -e1alk -e1all -e5alp -e1alt -e5alw -e1am -e1and -ea6nim -e1ar. -e5arf -e1ark -e5arm -e3art -e5at. -e6ate -e6a5t6l -e8ats -e5att -e6au. 
-e1aus -e1b -e6b5am -ebens5e -eb4lie -eb4ser -eb4s3in -e1che -e8cherz -e1chi -ech3m -8ech3n -ech1r -ech8send -ech4su -e1chu -eck5an -e5cl -e1d -ee5a -ee3e -ee5g -e1ei -ee5isc -eei4s3t -ee6lend -e1ell -ee5lö -e1erd -ee3r4e -ee8reng -eere6s5 -ee5rä -ee6tat -e1ex -e1f -e6fau -e8fe8b -3effek -ef3rom -ege6ra -eglo6si -1egy -e1ha -e6h5ach -eh5ans -e6hap -eh5auf -e1he -e1hi -ehl3a -eh1le -ehl5ein -eh1mu -ehn5ec -e1ho -ehr1a -eh1re -ehre6n -eh1ri -eh1ru -ehr5um -e1hu -eh1w -e1hy -e1hä -e1hö -e3hüt -ei1a -eia6s -ei6bar -eich3a -eich5r -ei4dar -ei6d5ei -ei8derf -ei3d4sc -ei1e -8eifen -3eifri -1eign -eil1d -ei6mab -ei8mag -ein1a4 -ei8nat -ei8nerh -ei8ness -ei6nete -ein1g -e8ini -ein1k -ei6n5od -ei8nok -ei4nor -e3insä -ei1o -e1irr -ei5ru -ei8sab -ei5schn -ei6s5ent -ei8sol -ei4t3al -eit3ar -eit1h -ei6thi -ei8tho -eit8samt -ei6t5um -e1j -1ekd -e1ke -e1ki -e1k2l -e1kn -ekni4 -e1la -e2l1al -6elan -e6lanf -e8lanl -e6l5ans -el3arb -el3arm -e6l3art -5e6lasti -e6lauge -elbst5a -e1le -6elef -ele6h -e6l5ehe -e8leif -e6l5einh -1elek -e8lel -3eleme -e6lemen -e6lente -el5epi -e4l3err -e6l5ersc -elf2l -elg2 -e6l5ins -ell8er -4e1lo -e4l3ofe -el8soh -el8tent -5eltern -e1lu -elut2 -e1lä -e1lü -em8dei -em8meis -4emo -emo5s -1emp1f -1empt -1emto -e1mu -emurk4 -emurks5 -e1mä -en5a6ben -en5achs -en5ack -e1nad -en5af -en5all -en3alt -en1am -en3an. -en3ant -en3anz -en1a6p -en1ar -en1a6s -6e1nat -en3auf -en3aus -en2ce -enda6l -end5erf -end5erg -en8dess -4ene. -en5eck -e8neff -e6n5ehr -e6n5eim -en3eis -6enem. -6enen -e4nent -4ener. -e8nerd -e6n3erf -e4nerg -5energi -e6n5erla -en5ers -e6nerst -en5erw -6enes -e6n5ess -e2nex -en3glo -2eni -enni6s5 -ennos4 -enns8 -e1no -e6nober -eno8f -en5opf -e4n3ord -en8sers -ens8kl -en1sp -ens6por -en5t6ag -enta5go -en8terbu -en6tid -3entla -ent5ric -5entwic -5entwu -1entz -enu5i -e3ny -en8zan -en1öf -e1nös -e1nüg -eo1c -e5o6fe -e5okk -e1on. -e3onf -e5onk -e5onl -e5onr -e5opf -e5ops -e5or. -e1ord -e1org -eo5r6h -eo1t -e1pa -e8pee -e6p5e6g -ep5ent -e1p2f -e1pi -5epid -e6pidem -e1pl -5epos -e6pos. -ep4p3a -e1pr -e1pä -e1q -e1ra. -er5aal -8eraba -e5rabel -er5a6ben -e5rabi -er3abs -er3ach -era5e -era5k6l -er3all -er3amt -e3rand -e3rane -er3ans -e5ranz. -e1rap -er3arc -e3rari -er3a6si -e1rat -erat3s -er3auf -e3raum -3erbse -er1c -e1re -4e5re. -er3eck -er5egg -er5e2h -2erei -e3rei. -e8reine -er5einr -6eren. -e4r3enm -4erer. -e6r5erm -er5ero -er5erst -e4r3erz -er3ess -5erfül -er8gan. -5ergebn -er2g5h -5ergänz -5erhöhu -2e1ri -eri5ak -e6r5iat -e4r3ind -e6r5i6n5i6 -er5ins -e6r5int -er5itio -er1kl -3erklä -5erlös. -ermen6s -er6nab -3ernst -6e1ro. -e1rod -er1o2f -e1rog -6e3roi -ero8ide -e3rol -e1rom -e1ron -e3rop8 -e2r1or -e1ros -e1rot -er5ox -ersch4 -5erstat -er6t5ein -er2t1h -er5t6her -2e1ru -eruf4s3 -e4r3uhr -er3ums -e5rus -5erwerb -e1ry -er5zwa -er3zwu -erä8m -er5äs -erö8 -e3rös. -e6r1ü2b -e1sa -esa8b -e8sap -e6s5a6v -e1sc -esch4l -ese1a -es5ebe -eserve5 -e8sh -es5ill -es3int -es4kop -e2sl -eso8b -e1sp -espei6s5 -es2po -es2pu -5essenz -e6stabs -e6staf -e6st5ak -est3ar -e8stob -e1str -est5res -es3ur -e2sz -e1sü -e1ta -et8ag -etari5e -eta8ta -e1te -eten6te -et5hal -e5thel -e1ti -1etn -e1to -e1tr -et3rec -e8tscha -et8se -et6tei -et2th -et2t1r -e1tu -etu1s -et8zent -et8zw -e1tä -e1tö -e1tü -eu1a2 -eu1e -eue8rei -eu5fe -euin5 -euk2 -e1um. -eu6nio -e5unter -eu1o6 -eu5p -3europ -eu1sp -eu5str -eu8zo -e1v -eval6s -eve5r6en -ever4i -e1w -e2wig -ex1or -1exp -1extr -ey3er. -e1z -e1ä2 -e5ö8 -e1ü -e8ßes -fa6ch5i -fade8 -fa6del -fa5el. 
-fal6lo -falt8e -fa1na -fan4gr -6fanl -6fap -far6ba -far4bl -far6r5a -2f1art -fa1sc -fau8str -fa3y -2f1b2 -6f1c -2f1d -1fe -2f1eck -fe6dr -feh6lei -f6eim -8feins -f5eis -fel5en -8feltern -8femp -fe5rant -4ferd. -ferri8 -fe8stof -fe6str -fe6stum -fe8tag -fet6ta -fex1 -2ff -f1fa -f6f5arm -f5fe -ffe5in -ffe6la -ffe8ler -ff1f -f1fla -ff3lei -ff4lie -ff8sa -ff6s5ta -2f1g2 -fgewen6 -4f1h -1fi -fid4 -fi3ds -fieb4 -fi1la -fi8lei -fil4m5a -f8in. -fi1na -8finf -fi8scho -fi6u -6f1j -2f1k2 -f8lanz -fl8e -4f3lein -8flib -4fling -f2lix -6f3lon -5flop -1flor -5f8läc -3flöt -2f1m -2f1n -1fo -foh1 -f2on -fo6na -2f1op -fo5ra -for8mei -for8str -for8th -for6t5r -fo5ru -6f5otte -2f1p8 -f1q -fr6 -f5ram -1f8ran -f8raß -f8re. -frei1 -5frei. -f3reic -f3rest -f1rib -8f1ric -6frig -1fris -fro8na -fräs5t -2fs -f1sc -f2s1er -f5str -fs3tät -2ft -f1tak -f1te -ft5e6h -ftere6 -ft1h -f1ti -f5to -f1tr -ft5rad -ft1sc -ft2so -f1tu -ftwi3d4 -ft1z -1fu -6f5ums -6funf -fun4ka -fu8ßend -6f1v -2f1w -2f1z -1fä -fä1c -8färm -6fäug -fä8ß -föde3 -8föf -3för -1fü -fün4f3u -1ga -ga6bl -6gabw -8gabz -g3a4der -ga8ho -ga5isc -4gak -ga1la -6g5amt -ga1na -gan5erb -gan6g5a -ga5nj -6ganl -8gansc -6garb -2g1arc -2g1arm -ga5ro -6g3arti -ga8sa -ga8sc -ga6stre -2g1atm -6g5auf -gau5fr -g5aus -2g1b -g5c -6gd -g1da -1ge -ge1a2 -ge6an -ge8at. -ge1e2 -ge6es -gef2 -8geff -ge1g2l -ge1im -4g3eise -geist5r -gel8bra -gelt8s -ge5lö -ge8nin -gen3k -6g5entf -ge3nä -ge1or -ge1ra -ge6rab -ger8au -8gerhö -ger8ins -ge1ro -6g5erz. -ge1rä -ge1rü -ge1s -ges2p -ge2s7te. -ge2s7ten -ge2s7ter -ge2s7tik -ge5unt -4g3ex3 -2g1f8 -2g1g -g1ha -6g1hei -5ghel. -g5henn -6g1hi -g1ho -1ghr -g1hö -1gi -gi5la -gi8me. -gi1na -4g3ins -gis1tr -g1j -2g1k -8gl. -1glad -g5lag -glan4z3 -1glas -6glass -5glaub -g3lauf -1gle. -g5leb -3gleic -g3lein -5gleis -1glem -2gler -8g3leu -gli8a -g2lie -3glied -1g2lik -1g2lim -g6lio -1gloa -5glom -1glon -1glop -g1los -g4loss -g5luf -1g2ly -1glü -2g1m -gn8 -6gn. -1gna -8gnach -2gnah -g1nas -g8neu -g2nie -g3nis -1gno -8gnot -1go -goe1 -8gof -2gog -5gogr -6g5oh -goni5e -6gonist -go1ra -8gord -2g1p2 -g1q -1gr4 -g5rahm -gra8m -gra4s3t -6g1rec -gre6ge -4g3reic -g5reit -8grenn -gri4e -g5riem -5grif -2grig -g5ring -6groh -2grot -gro6ß -4grut -2gs -gs1ab -g5sah -gs1ak -gs1an -gs8and -gs1ar -gs1au -g1sc -gs1ef -g5seil -gs5ein -g2s1er -gs1in -g2s1o -gso2r -gs1pr -g2s1u -2g1t -g3te -g2t1h -1gu -gu5as -gu2e -2gue. -6gued -4g3uh -8gums -6g5unt -gut3h -gu2tu -4g1v -2g1w -gy1n -g1z -1gä -8gä8m -6gärm -1gö -1gü -6güb -1haa -hab8r -ha8del -hade4n -8hae -ha5el. -haf6tr -2hal. -ha1la -hal4b5a -6hale -8han. -ha1na -han6dr -han6ge. -2hani -h5anth -6hanz -6harb -h3arbe -h3arme -ha5ro -ha2t1h -h1atm -hau6san -ha8ß -h1b2 -h1c -h1d -he2bl -he3cho -h3echt -he5d6s -5heft -h5e6he. -hei8ds -h1eif -2hein -he3ism -he5ist. -heit8s3 -hek6ta -hel8lau -8helt -he6mer -1hemm -6h1emp -hen5end -hen5klo -hen6tri -he2nu -8heo -he8q -her3ab -he5rak -her3an -4herap -her3au -h3erbi -he1ro -he8ro8b -he4r3um -her6z5er -he4spe -he1st -heta6 -het5am -he5th -heu3sc -he1xa -hey5e -h1f2 -h1g -hgol8 -h1h -h1iat -hie6r5i -hi5kt -hil1a2 -hil4fr -hi5nak -hin4ta -hi2nu -hi5ob -hirn5e -hir6ner -hi1sp -hi1th -hi5tr -5hitz -h1j -h6jo -h1k2 -hlabb4 -hla4ga -hla6gr -h5lai -hl8am -h1las -h1laß -hl1c -h1led -h3lein -h5ler. -h2lif -h2lim -h8linf -hl5int -h2lip -h2lit -h4lor -h3lose -h1läs -hme5e -h2nee -h2nei -hn3eig -h2nel -hne8n -hne4p3f -hn8erz -h6netz -h2nip -h2nit -h1nol -hn5sp -h2nuc -h2nud -h2nul -hoch1 -1hoh -hoh8lei -2hoi -ho4l3ar -1holz -h2on -ho1ra -6horg -5horn. 
-ho3sl -hos1p -ho4spi -h1p -hpi6 -h1q -6hr -h1rai -h8rank -h5raum -hr1c -hrcre8 -h1red -h3reg -h8rei. -h4r3erb -h8rert -hrg2 -h1ric -hr5ins -h2rom -hr6t5erl -hr2t1h -hr6t5ra -hr8tri -h6rum -hr1z -hs3ach -h6s5amt -h1sc -h6s5ec -h6s5erl -hs8erle -h4sob -h1sp -h8spaß -h8spel -hs6po -h4spun -h1str -h4s3tum -hs3und -h1sü -h5ta. -h5tab -ht3ac -ht1ak -ht3ang -h5tanz -ht1ar -ht1at -h5taub -h1te -h2t1ec -ht3eff -ht3ehe -h4t3eif -h8teim -h4t3ein -ht3eis -h6temp -h8tentf -hte8ren -h6terfü -h8tergr -h4t3erh -h6t5ersc -h8terst -h8tese -h8tess -h2t1eu -h4t3ex -ht1he -ht5hu -h1ti -ht5rak -hts3ah -ht1sc -ht6sex -ht8sk -ht8so -h1tu -htz8 -h5tüm -hub5l -hu6b5r -huh1l -h5uhr. -huld5a6 -hu8lent -hu8lä -h5up. -h1v -h5weib -h3weis -h1z -hä8kl -häl8s -häma8tu8 -hä8sche. -hät1s -häu4s3c -2hö. -2höe -8höi -hö6s -hös5c -hühne6 -hül4s3t -hütte8re -i5adn -i1af -i5ak. -i1al. -i1al1a -i1alb -i1ald -i5alei -i1alf -i1alg -i3alh -i1alk -i1all -i1alp -i1alr -i1als -i1alt -i1alv -i5alw -i3alz -i1an. -ia5na -i3and -ian8e -ia8ne8b -i1ang -i3ank -i5ann -i1ant -i1anz -i6apo -i1ar. -ia6rab -i5arr -i1as. -i1asm -i1ass -i5ast. -i1at. -i5ats -i1au -i5azz -i6b5eig -i6b5eis -ib2le -i4blis -i6brig -i6b5unt -i6büb -i1che -ich5ei -i6cherb -i1chi -ich5ins -ich1l -ich3m -ich1n -i1cho -icht5an -icht3r -i1chu -ich1w -ick6s5te -ic5l -i1d -id3arm -3ideal -ide8na -3ideol -ide5rö -i6diot -id5rec -id1t -ie1a -ie6b5ar -iebe4s3 -ie2bl -ieb1r -ie8bra -ie4bre -ie8bä -ie2dr -ie1e8 -ie6f5ad -ief5f -ie2f1l -ie4fro -ief1t -i1ei -ie4l3ec -ie8lei -ie4lek -i3ell -i1en. -i1end -ien6e -i3enf -i5enn -ien6ne. -i1enp -i1enr -i5ensa -ien8stal -i5env -i1enz -ie5o -ier3a4b -ie4rap -i2ere -ie4rec -ie6r5ein -ie6r5eis -ier8er -i3ern. -ie8rum -ie8rund -ie6s5che -ie6tau -ie8tert -ie5the -ie6t5ri -i1ett -ie5un -iex5 -2if -i1fa -if5ang -i6fau -if1fr -if5lac -i5f6lie -i1fre -ift5a -if6t5r -ig3art -2ige -i8gess -ig5he -i5gla -ig2ni -i5go -ig3rot -ig3s2p -i1ha -i8ham -i8hans -i1he -i1hi -ih1n -ih1r -i1hu -i8hum -ih1w -8i1i -ii2s -ii2t -i1j -i1k -i6kak -i8kerz -i6kes -ik4ler -i6k5unt -2il -i5lac -i1lag -il3ans -i5las -i1lau -il6auf -i1le -ile8h -i8lel -il2fl -il3ipp -il6l5enn -i1lo -ilt8e -i1lu -i1lä -i8mart -imb2 -i8mele -i8mid -imme6l5a -i1mu -i1mä -i5mö -ina5he -i1nat -in1au -inau8s -8ind. -in4d3an -5index -ind2r -3indus -i5nec -i2n1ei -i8nerw -3infek -1info -5ingeni -ing5s6o -5inhab -ini5er. -5inj -in8kät -in8nan -i1no -inoi8d -in3o4ku -in5sau -in1sp -5inspe -5instit -5instru -ins4ze -5intere -5interv -in3the -in5t2r -i5ny -inä2 -i1när -in1äs -inö8 -in5öd -i1nös -2io -io1a8 -io1c -iode4 -io2di -ioi8 -i1ol. -i1om. -i1on. -i5onb -ion2s1 -i1ont -i5ops -i5o8pt -i1or. -i3oral -io3rat -i5orc -i1os. -i1ot. -i1o8x -2ip -i1pa -i1pi -i1p2l -i1pr -i1q -i1ra -ir6bl -i1re -i1ri -ir8me8d -ir2m1o2 -ir8nak -i1ro -ir5rho -ir6schl -ir6sch5r -i5rus -i5ry -i5rä -i1sa -i8samt -i6sar -i2s1au -i8scheh -i8schei -isch5m -isch3r -ischä8 -is8ele -ise3ra -i4s3erh -is3err -isi6de -i8sind -is4kop -ison5e -is6por -i8s5tum -i5sty -i5sö -i1ta -it5ab. -i2t1a2m -i8tax -i1te -i8tersc -i1thi -i1tho -i5thr -it8hä -i1ti -i8ti8d -iti6kl -itmen4 -i1to -i8tof -it3ran -it3rau -i1tri -itri5o -it1sc -it2se -it5spa -it8tru -i1tu -it6z5erg -it6z1w -i1tä -itä6r5e -ität2 -itäts5 -i1tü -i1u -iu6r -2i1v -i6vad -iva8tin -i8vei -i6v5ene -i8verh -i2vob -i8vur -i1w -iwi2 -i5xa -i1xe -i1z -ize8n -i8zir -i6z5w -iä8m -i1ä6r -i5ät. 
-i5äv -i1ö8 -iü8 -i6ß5ers -ja5la -je2t3r -6jm -5jo -jo5as -jo1ra -jou6l -ju5cha -jugen4 -jugend5 -jung5s6 -3jä -1ka -8kachs -8kakz -ka1la -kal5d -kam5t -ka1na -2kanl -8kapf -ka6pl -ka5r6a -6k3arbe -ka1ro -kar6p5f -4k3arti -8karz -ka1rä -kasi5e -ka6teb -kat8ta -kauf6s -kau3t2 -2k1b -2k1c -4k1d -kehr6s -kehrs5a -8keic -2k1eig -6k5ein -6k5eis -ke6lar -ke8leis -ke8lo -8kemp -k5ente. -k3entf -8k5ents -6kentz -ke1ra -k5erlau -2k1f8 -2k1g -2k1h -ki5fl -8kik -king6s5 -6kinh -ki5os -ki5sp -ki5th -8ki8ö -2k1k2 -kl8 -1kla -8klac -k5lager -kle4br -k3leib -3kleid -kle5isc -4k3leit -k3lek -6k5ler. -5klet -2klic -8klig -k2lim -k2lin -5klip -5klop -k3lor -1klä -2k1m -kmani5e -kn8 -6kner -k2ni -knä8 -1k2o -ko1a2 -ko6de. -ko1i -koi8t -ko6min -ko1op -ko1or -ko6pht -ko3ra -kor6d5er -ko5ru -ko5t6sc -k3ou -3kow -6k5ox -2k1p2 -k1q -1kr8 -4k3rad -2k1rec -4k3reic -kre5ie -2krib -6krig -2krip -6kroba -2ks -k1sa -k6sab -ksal8s -k8samt -k6san -k1sc -k2s1ex -k5spat -k5spe -k8spil -ks6por -k1spr -kst8 -k2s1uf -2k1t -kta8l -kt5a6re -k8tein -kte8re -k2t1h -k8tinf -kt3rec -kt1s -1ku -ku1ch -kuck8 -k3uhr -ku5ie -kum2s1 -kunfts5 -kun2s -kunst3 -ku8rau -ku4ro -kurz1 -4kusti -ku1ta -ku8ß -6k1v -2k1w -ky5n -2k1z -1kä -kä4m -4k3ämi -käse5 -1kö -kö1c -kö1s -1kü -kü1c -kür6sc -1la. -8labf -8labh -lab2r -2l1abs -lach3r -la8dr -5ladu -8ladv -6laff -laf5t -la2gn -5laken -8lamb -la6mer -5lampe. -2l1amt -la1na -1land -lan4d3a -lan4d3r -lan4gr -8lanme -6lann -8lanw -6lanä -8lappa -lap8pl -lap6pr -l8ar. -la5ra -lar4af -la8rag -la8ran -la6r5a6s -l3arbe -la8rei -6larm. -la8sa -la1sc -la8sta -lat8i -6l5atm -4lauss -4lauto -1law -2lb -l8bab -l8bauf -l8bede -l4b3ins -l5blo -lbst5an -lbst3e -8lc -l1che -l8chert -l1chi -lch3m -l5cho -lch5w -6ld -l4d3ei -ld1re -l6düb -le2bl -le8bre -lecht6s5 -led2r -6leff -le4gas -1lehr -lei6br -le8inf -8leinn -5leistu -4lektr -le6l5ers -lemo2 -8lemp -l8en. -8lends -6lendun -le8nend -len8erw -6l5ents -4l3entw -4lentz -8lenzy -8leoz -6lepi -le6pip -8lepo -1ler -l6er. -8lerbs -6l5erde -le8reis -le8rend -le4r3er -4l3erg -l8ergr -6lerkl -6l5erzie -8lerö -8lesel -lesi5e -le3sko -le3tha -let1s -5leuc -4leuro -leu4s3t -le5xe -6lexp -l1f -2l1g -lgend8 -l8gh -lglie3 -lglied6 -6l1h -1li -li1ar -li1as -2lick -li8dr -li1en -lien6n -li8ers -li8ert -2ließ -3lig -li8ga8b -li1g6n -li1l8a -8limb -li1na -4l3indu -lings5 -4l3inh -6linj -link4s3 -4linkt -2lint -8linv -4lipp -5lipt -4lisam -livi5e -6l1j -6l1k -l8keim -l8kj -lk2l -lko8f -lkor8 -lk2sa -lk2se -6ll -l1la -ll3a4be -l8labt -ll8anl -ll1b -ll1c -ll1d6 -l1le -l4l3eim -l6l5eise -ller3a -l4leti -l5lip -l1lo -ll3ort -ll5ov -ll6spr -llte8 -l1lu -ll3urg -l1lä -l5lü -l6lüb -2l1m -l6m5o6d -6ln -l1na -l1no -8lobl -lo6br -3loch. -l5o4fen -5loge. -5lohn -4l3ohr -1lok -l2on -4l3o4per -lo1ra -2l1ord -6lorg -4lort -lo1ru -1los. 
-lo8sei -3losig -lo6ve -lowi5 -6l1p -lp2f -l8pho -l8pn -lp4s3te -l2pt -l1q -8l1r -2ls -l1sa -l6sarm -l1sc -l8sec -l6s5erg -l4s3ers -l8sh -l5s6la -l1sp -ls4por -ls2pu -l1str -l8suni -l1sü -2l1t -lt5amp -l4t3ein -l5ten -l6t5eng -l6t5erp -l4t3hei -lt3her -l2t1ho -l6t5i6b -lti1l -l8trö -lt1sc -lt6ser -lt4s3o -lt5ums -lu8br -lu2dr -lu1en8 -8lu8fe -luft3a -luf8tr -lu6g5r -2luh -l1uhr -lu5it -5luk -2l1umf -2l1umw -1lun -6l5u6nio -4l3unte -lu5ol -4lurg -6lurs -l3urt -lu4sto -lus1tr -lu6st5re -lu8su -lu6tal -lu6t5e6g -lu8terg -lu3the -lu6t5or -lu2t1r -lu6ß5 -l1v -lve5r6u -2l1w -1ly -lya6 -6lymp -ly1no -l8zess -l8zo8f -l3zwei -lz5wu -3länd -lä5on -lä6sc -lät1s -5läuf -2läug -läu6s5c -lä5v -l1öl -1lös -lö1ß6t -6l1übe -1ma -8mabg -ma5chan -mad2 -ma5el -4magg -mag8n -ma1la -ma8lau -mal5d -8malde -mali5e -malu8 -ma8lut -2m1amp -3man -mand2 -man3ds -8mangr -mani5o -8m5anst -6mappa -4m3arbe -mar8kr -ma1r4o -mar8schm -3mas -ma1sc -ma1tö -4m5auf -ma5yo -2m1b -mb6r -2m1c -2m1d -md6sä -1me -me1ch -me5isc -5meld -mel8sa -8memp -me5nal -men4dr -men8schl -men8schw -8mentsp -me1ra -mer4gl -me1ro -3mes -me6s5ei -me1th -me8ß -2m1f6 -2m1g -2m1h -1mi -mi1a -mi6ale -mi1la -2m1imm -mi1na -mi5nü -mi4s3an -mit1h -mi5t6ra -3mitt -mitta8 -mi6ß5 -6mj -2m1k8 -2m1l -2m1m -m6mad -m6m5ak -m8menth -m8mentw -mme6ra -m2mn -mm5sp -mm5ums -mmut5s -m8män -m1n8 -m5ni -1mo -mo5ar -mo4dr -8mof -mo8gal -mo4kla -mol5d -m2on -mon8do -mo4n3od -mon2s1tr -mont8a -6m5ony -mopa6 -mo1ra -mor8d5a -mo1sc -mo1sp -5mot -moy5 -2mp -m1pa -mpfa6 -mpf3l -mphe6 -m1pi -mpin6 -m1pl -mp2li -m2plu -mpo8ste -m1pr -mprä5 -mp8th -mput6 -mpu5ts -m1pö -8m1q -2m1r -2ms -ms5au -m1sc -msch4l -ms6po -m3spri -m1str -2m1t -mt1ar -m8tein -m2t1h -mt6se -mt8sä -mu5e -6m5uh -mumi1 -1mun -mun6dr -muse5e -mu1ta -2m1v -mvol2 -mvoll3 -2m1w -1my -2m1z -mä6kl -1män -mä1s -mä5tr -mäu4s3c -3mäß -möb2 -6möl -1mü -5mün -3müt -1na. -n5ab. -8nabn -n1abs -n1abz -na6bä -na2c -nach3e -3nacht -1nae -na5el -n1afr -1nag -1n2ah -na8ha -na8ho -1nai -6nair -na4kol -n1akt -nal1a -8naly -1nama -na4mer -na1mn -n1amp -8n1amt -5nanc -nan6ce -n1and -n6and. -2n1ang -1nani -1nann -n1ans -8nanw -5napf. -1n2ar. -na2ra -2n1arc -n8ard -1nari -n8ark -6n1arm -5n6ars -2n1art -n8arv -6natm -nat6s5e -1naue -4nauf -n3aug -5naui -n5auk -na5um -6nausb -6nauto -1nav -2nax -3naz -1naß -n1b2 -nbau5s -n1c -nche5e -nch5m -2n1d -nda8d -n2d1ak -nd5ans -n2d1ei -nde8lac -ndel6sa -n8derhi -nde4se -nde8stal -n2dj -ndnis5 -n6d5or6t -nd3rec -nd3rot -nd8samt -nd6sau -ndt1h -n8dumd -1ne -ne5as -ne2bl -6n5ebn -2nec -5neei -ne5en -ne1g4l -2negy -4n1ein -8neis -4n3e4lem -8nemb -2n1emp -nen1a -6n5energ -nen3k -8nentb -4n3en3th -8nentl -8n5entn -8n5ents -ne1ra -ne5r8al -ne8ras -8nerbi -6n5erde. -nere5i6d -nerfor6 -6n5erhö -8nerlö -2n1err -n8ers. 
-6n5ertra -2n1erz -nesi3e -net1h -neu4ra -neu5sc -8neuß -n1f -nf5f -nf2l -nflei8 -nf5lin -nft8st -n8g5ac -ng5d -ng8en -nge8ram -ngg2 -ng1h -n6glic -ng3rip -ng8ru -ng2se4 -ng2si -n2g1um -n1gy -n8gäl -n1h -nhe6r5e -1ni -ni1bl -ni5chä -ni8dee -n6ie -ni1en -nie6s5te -niet5h -ni8etn -4n3i6gel -n6ik -ni1la -2n1imp -ni5na -2n1ind -8ninf -6n5inh -ni8nit -6n5inn -2n1ins -4n1int -n6is -nis1tr -ni1th -ni1tr -n1j -n6ji -n8kad -nk5ans -n1ke -n8kerla -n1ki -nk5inh -n5klö -n1k2n -n8k5not -nk3rot -n8krü -nk5spo -nk6t5r -n8kuh -n6küb -n5l6 -nli4mi -n1m -nmen4s -n1na -n8nerg -nni5o -n1no -nn4t3ak -nnt1h -nnu1e -n1ny -n1nä -n1nö -n1nü -no5a -no4b3la -4n3obs -2nobt -noche8 -no6die -no4dis -no8ia -no5isc -6n5o6leu -no4mal -noni6er -2n1onk -n1ony -4n3o4per -6nopf -6nopti -no3ra -no4ram -nor6da -4n1org -2n1ort -n6os -no1st -8nost. -no8tan -no8ter -noty6pe -6n5ox -n1p2 -n1q -n1r -nrös3 -6ns -n1sac -ns3ang -n1sc -n8self -n8s5erf -n8serg -n6serk -ns5erw -n8sint -n1s2pe -n1spr -n6s5tat. -n6stob -n1str -n1ta -n4t3a4go -nt5anh -nt3ark -nt3art -n1te -nt3eis -nte5n6ar -nte8nei -nter3a -nte6rei -nt1ha -nt6har -n3ther -nt5hie -n3thus -n1ti -nti1c -n8tinh -nti1t -ntlo6b -ntmen8 -n1to -nt3o4ti -n1tr -ntra5f -ntra5ut -nt8rea -nt3rec -nt8rep -n4t3rin -nt8rop -n4t3rot -n4trü -nt1s -nts6an -nt2sk -n1tu -nt1z -n1tä -n1tö -n8töl -n1tü -1nu -nu1a -nu5el -nu5en -4n1uhr -nu5ie -8numl -6n5ums -6n5umw -2n1und -6nuni -6n5unr -2n1unt -2nup -2nu6r -n5uri -nu3skr -nu5ta -n1v -8n1w -1nys -n1za -n6zab -n2z1ar -n6zaus -nzi4ga -n8zof -n6z5unt -n1zw -n6zwir -1näc -5näe -5näi -n8äl -nä6m -nä6re -n5ärz -5näus -n1öl -1nöt -n5öz -5nü. -6n1ü2b -5nüß -o5ab. -oa2l -o8ala -o1a2m -o1an -ob1ac -obe4ra -o6berh -5o4bers -o4beru -obe6ser -1obj -o1bl -o2bli -ob5sk -3obst. -ob8sta -obst5re -ob5sz -o1che -oche8b -o8chec -o3chi -och1l -och3m -ocho8f -o3chro -och3to -o3chu -och1w -o1d -o2d1ag -od2dr -ode5i -ode6n5e -od1tr -o5e6b -o5e6der. -oe8du -o1ef -o1e2l -o1e2p -o1er. -o5e8x -o1fa -of8fan -1offi -of8fin -of6f5la -o5fla -o1fr -8o1g -og2n -o1ha -o1he -o6h5eis -o1hi -ohl1a -oh1le -oh4l3er -5ohm. -oh2ni -o1ho -oh1re -oh1ru -o1hu -oh1w -o1hy -o1hä -o5ia -o1id. -o8idi -oi8dr -o5ids -o5isch. -oiset6 -o1ism -o3ist. -o5i6tu -o1j -o1k -ok2l -ok3lau -o8klä -1okta -o1la -old5am -old5r -o1le -ole5in -ole1r -ole3u -ol6gl -ol2kl -olk4s1 -ol8lak -ol8lauf. -ol6lel -ol8less -o1lo -ol1s -ol2ster -ol6sk -o1lu -oly1e2 -5olym -o2mab -om6an -o8mau -ombe4 -o8merz -om5sp -o1mu -o8munt -o1mä -o1mö -o1na -ona8m -on1ax -on8ent -o6n5erb -8oni -oni5er. -on1k -on6n5a6b -o1no -ono1c -o4nokt -1ons -onts8 -o1nä -oo8f -1oog -oo2pe -oo2sa -o1pa -3o4pera -o3pfli -opf3lo -opf3r -o1pi -o1pl -o2pli -o5p6n -op8pa -op6pl -o1pr -o3p4ter -1opti -o1pä -o5pö -o1q -o1ra. -o3rad -o8radd -1oram -o6rang -o5ras -o8rauf -or5cha -or4d3a4m -or8dei -or8deu -1ordn -or4dos -o1re -o5re. -ore2h -o8r5ein -ore5isc -or6enn -or8fla -or8fli -1orga -5orgel. -or2gl -o1ri -5o6rient -or8nan -or8nä -o1ro -or1r2h -or6t5an -or8tau -or8tere -o1rus -o1ry -o1rä -or1ü2 -o1sa -osa3i -6ose -o8serk -o1sk -o6ske -o6ski -os2kl -os2ko -os2kr -osni5e -o2s1o2d -o3s4per -o4stam -o6stau -o3stra -ost3re -osu6 -o6s5ur -o5s6ze -o1ta -ot3auf -o6taus -o1te -o6terw -o1th -othe5u -o2th1r -o1ti -o1to -oto1a -ot1re -o1tri -o1tro -ot1sc -o3tsu -ot6t5erg -ot2t3h -ot2t5r -ot8tö -o1tu -ou3e -ouf1 -ou5f6l -o5u6gr -ou5ie -ou6rar -ou1t6a -o1v -o1wa -o1we -o6wer. -o1wi -owid6 -o1wo -o5wu -o1xe -oy5al. 
-oy1e -oy1i -o5yo -o1z -oza2r -1o2zea -ozo3is -oö8 -oß5elt -oß1t -3paa -pa6ce -5pad -pag2 -1pak -pa1la -pa8na8t -pani5el -pa4nor -pan1s2 -1pap -pap8s -pa8rei -par8kr -paro8n -par5o6ti -part8e -5partei -3partn -pas6sep -pa4tha -1pau -6paug -pau3sc -p1b -8p5c -4p1d -1pe -4peic -pe5isc -2pek -pen3k -pen8to8 -p8er -pe1ra -pere6 -per5ea -per5eb -pe4rem -2perr -per8ran -3pers -4persi -pe3rü -pe4sta -pet2s -p2f1ec -p4fei -pf1f -pf2l -5pflanz -pf8leg -pf3lei -2pft -pf3ta -p1g -1ph -2ph. -2p1haf -6phb -8phd -6p5heit -ph5eme -6phg -phi6e -8phk -6phn -p5holl -pht2 -ph3tha -4ph3the -phu6 -6phz -pi1en -pi5err -pi1la -pi1na -5pinse -pioni8e -1pis -pi1s2k -pi1th -p1k -pl8 -5pla -p2lau -4plei -p3lein -2pler -6p5les -2plig -p6lik -6p5ling -p2liz -plo8min -6p1m -p1n -1p2o -8poh -5pol -po8lan -poly1 -po3ny -po1ra -2porn -por4t3h -po5rö -5poti -p1pa -p6p5ei -ppe6la -pp5f -p2p1h -p1pi -pp1l -ppp6 -pp5ren -pp1s -pp2ste -p5pö -pr6 -3preis -1pres -2p3rig -5prinz -1prob -1prod -5prog -pro8pt -pro6t5a -prote5i -8proß -prä3l -1präs -präte4 -1prüf -p5schl -2pst -1p2sy -p1t -p8to8d -pt1s -5p6ty -1pu -pu1b2 -2puc -pu2dr -puf8fr -6p5uh -pun8s -pu8rei -pu5s6h -pu1ta -p1v -p3w -5py -py5l -p1z -pä6der -p5ä6m -pä8nu -8pär -pät5h -pät1s -qu6 -1qui -8rabk -ra6bla -3rable -ra2br -r1abt -6rabz -ra4dan -ra2dr -5rafal -ra4f3er -ra5gla -ra2g3n -6raha -ral5am -5rald -4ralg -ra8lins -2rall -ral5t -8ramei -r3anal -r6and -ran8der -ran4dr -8ranf -6ranga -5rangi -ran8gli -r3angr -rans5pa -8ranw -r8anz. -ra5or -6rapf -ra5pl -rap6s5er -2r1arb -1rarh -r1arm -ra5ro -2r1art -6r1arz -ra8tei -ra6t5he -6ratl -ra4t3ro -r5atta -raue4n -6raus. -r5austa -rau8tel -raut5s -ray1 -r1b -rb5lass -r6bler -rb4lie -rbon6n -r8brecht -rb6s5tä -r8ces -r1che -rch1l -rch3m -rch3re -rch3tr -rch1w -8rd -r1da -r8dachs -r8dap -rda5ro -rde5ins -rdio5 -r8dir -rd3ost -r1dr -r8drau -1re. -re1ak -3reakt -re3als -re6am. -re1as -4reben -re6bl -rech5a -r8edi -re3er -8reff -3refl -2reh -5reha -r4ei. -reich6s5 -8reier -6reign -re5imp -4r3eina -6r3einb -6reing -6r5einn -6reinr -4r3eins -r3eint -reli3e -8r5elt -6rempf -2remt -ren5a6b -ren8gl -r3enni -1reno -5rente -4r3enth -8rentl -4r3entw -8rentz -ren4zw -re1on -requi5 -1rer -rer4bl -6rerbs -4r3erd -8rerhö -8rerkl -4r3erla -8rerlö -4r3erns -6r5ernä -rer5o -6r5erreg -r5ertr -r5erwec -r5erö -re2sa -re8schm -2ress -re5u8ni -6rewo -2r1ex -r1f -r8ferd -rf4lie -8r1g -r8gah -rge4bl -rge5na -rgest4 -rg6ne -r2gni2 -r8gob -r4g3ret -rg8sel -r1h8 -r2hy -5rhyt -ri1ar -ri5cha -rid2g -r2ie -rieg4s5 -ri8ei -ri1el -ri6ele -ri1en -ri3er. -ri5ers. -ri6fan -ri8fer -ri8fr -1r2ig -ri8kn -ri5la -rimä8 -ri1na -r8inde -rin4ga -rin6gr -1rinn -6rinner -rino1 -r8insp -4rinst -ri1nä -ri5o6ch -ri1o2d -ri3o6st -2r1ir -r2is -ri3sko -ri8spr -ri5sv -r2it -6r5i6tal -ri5tr -ri6ve. -8r1j -6rk -r1ke -rkehrs5 -r1ki -r3klin -r1k2n -rk3str -rk4t3an -rk6to -r6kuh -rkä4s3t -r1l -r5li -rline5a -6r1m -r6manl -rma4p -r4m3aph -r8minf -r8mob -rm5sa -2rn -r1na -rna8be -r5ne -rn2ei -r6neif -r6nex -r6nh -rn1k -r1no -r6n5oc -rn1sp -r1nä -r1nü -ro6bern -6robs -ro1ch -3rock. 
-ro5de -ro1e -4rofe -ro8hert -1rohr -ro5id -ro1in -ro5isc -6rolym -r2on -6roog -ro6phan -r3ort -ro1s2p -ro5s6w -ro4tau -ro1tr -ro6ts -5rout -r1p -rpe8re -rp2f -r2ps -r2pt -r1q -2rr -r1ra -r1re -rrer6 -rr6hos -r5rhö -r1ri -r1ro -rro8f -rr8or -rror5a -r1ru -r3ry -r1rä -r1rö -r1rü -2r1s -r2ste -r2sti -r6sab -r4sanf -rse6e -rse5na -r2sh -r6ska -r6ski -rs2kl -r8sko -r2sl -rs2p -r6stauf -r8sterw -r8stran -rswi3d4 -r2sz -2r1t -rt3art -r8taut -r5tei -rt5eige -r8tepe -r4t3erh -r8terla -r4t3hei -r5t6hu -r4t3int -rt5reif -rt1sc -rt6ser -rt6s5o -rt6s5u -rt5und -r8turt -rube6 -ru1en -1r4uf -ruf4st -ru1ie -2r1umg -2r1uml -2rums -run8der -run4d5r -6rundz -6runf -8runs -2r1unt -2r1ur -r6us -ru6sta -rus1tr -ru6tr -1ruts -r1v -rven1 -rvi2c -r1w -r1x -r1za -rz5ac -r6z5al -r8z1ar -r8zerd -r6z5erf -rz8erh -rz4t3h -r8zum -rä4ste -räu8sc -r1öf -5röhr -rö5le -3röll -5römis -r1ör -rö2sc -3rümp -1sa. -1saa -s3a4ben -sa2bl -2s1abs -6s1abt -6sabw -3sack. -6s3a4der -1saf -sa1fa -4s1aff -sa5fr -1sag -1sai -sa1i2k1 -4s1akt -1sal -sa1la -4s3alpi -6salter -salz3a -1sam -s5anb -san2c -1sand -s5angeh -6sanl -2s1ans -6s3antr -8s1anw -s1ap -s6aph -8sapo -sap5p6 -s8ar. -2s1arb -3sarg -s1arm -sa5ro -2s1art -6s1arz -1sas -1sat -sat8a -2s1atl -sa8tom -3s8aue -s5auff -sau5i -s6aur -2s1aus -5s6ause -2s1b2 -2sca -s4ce -8sch. -3scha. -5schade -3schaf -3schal -sch5ame -8schanc -8schb -1sche -6schef -8schex -2schf -2schg -2schh -1schi -2schk -5schlag -5schlu -6schmäß -6schnaß -1scho -6schord -6schp -3schri -8schric -8schrig -8schrou -6schs -2scht -sch3ta -sch3tr -1schu -8schunt -6schv -2schz -5schö -5schü -2sco -scre6 -6scu -2s1d -1se -se5an -se1ap -se6ben -se5ec -see5i6g -se3erl -8seff -se6han -se8hi -se8hö -6s5eid. -2s1eig -s8eil -5sein. -sei5n6e -6s5einh -3s8eit -3sel. -se4lar -selb4 -6s3e4lem -se8lerl -2s1emp -sen3ac -se5nec -6s5ents -4sentz -s8er. -se8reim -ser5inn -8sermä -8s5erzi -6seröf -se1um -8sexa -6sexp -2s1f2 -sfal8ler -2s3g2 -sge5b2 -s1h -s8hew -5s6hip -5s4hop -1si -2siat -si1b -sicht6s -6s5i6dee -siege6s5 -si1en -si5err -si1f2 -si1g2n -si6g5r -si8kau -sik1i -si4kin -si2kl -si8kü -si1la -sil6br -si1na -2s1inf -sin5gh -2s1inh -sinne6s5 -2s1ins -si5ru -si5str -4s1j -s1k2 -6sk. -2skau -skel6c -skelch5 -s6kele -1s2ki. -3s4kin. -s6kiz -s8kj -6skn -2skow -3skrib -3skrip -2sku -8skü -s1l -s8lal -slei3t -s4low -2s1m -s1n -6sna -6snot -1so -so1ch -2s1odo -so4dor -6s5o4fen -solo3 -s2on -so5of -4sope -so1ra -2s1ord -4sorga -sou5c -so3un -4s3ox -sp2 -8spaa -5spal -1span -2spap -s2pec -s4peis -1spek -s6perg -4spers -s6pes -2s1pf -8sphi -1s2phä -1spi -spi4e -6s5pig -6spinse -2spis -2spla -2spol -5s6pom -6s5pos -6spoti -1spra -3s8prec -6spreis -5spring -6sprob -1spru -s2pul -1s2pur -6spy -5spän -1spü -s1q -2s1r -2ssa -2sse -2ssi -2sso -2ssä -2ssö -2ssü -2s1sch -sse8nu -ssini6s -ssoi6r -2st. -1sta -4stafe -2stag -sta3la -6stale -4s2talg -8stalk -8stamt -6st5anf -4stans -6stanw -6starb -sta4te -6staus -2stb -6stc -6std -s1te -4steil -6steppi -8stesse -6stf -2stg -2sth -st1ha -st3hei -s8t1hi -st1ho -st5hu -s1ti -s2ti4el -4s2tigm -6s2tind -4s2tinf -s2ti8r -2stk -2stl -2stm -1sto -6stoll. -4st3ope -6stopf. -6stord -6stp -4strai -s3tral -6s5traum -3straß -3strec -6s3tref -8streib -5streif -6streno -6stres -6strev -2st5rig -8s2t1ris -s8troma -st5rose -4struf -3strum -6sträg -2st1s6 -2stt -1stu -stu5a -4stuc -2stue -8stun. -2stv -2stw -s2tyl -6stz -1stä -8stäg -1stö -1stü -8stüch -4stür. -1su -su2b1 -3suc -su1e -su2fe -su8mar -6sumfa -8sumk -2s1unt -sup1p2 -6s5u6ran -6surte -2s1v -2s1w -1sy -8syl. 
-sy5la -syn1 -sy2na -syne4 -s1z -s4zend -5s6zene. -8szu -1sä -6s5änd -6säugi -6säuß -5söm -2s1ü2b -1süc -sü8di -1sün -5süß -taats3 -4tab. -taba6k -ta8ban -tab2l -ta6bre -4tabs -t3absc -8tabz -6t3acht -ta6der -6tadr -tad6s -tad2t -1tafe4 -1tag -ta6ga6 -ta8gei -tage4s -tag6s5t -tah8 -tahl3 -tai6ne. -ta5ir. -tak8ta -tal3au -1tale -ta8leng -tal5ert -6t5a6mer -6tamp -tampe6 -2t1amt -tan5d6a -tan8dr -tands5a -tani5e -6tanl -2tanr -t3ans -8t5antr -tanu6 -t5anw -8tanwa -tan8zw -ta8rau -6tarbe -1tari -2tark -2t1arm -ta1ro -2tart -t3arti -6tarz -ta1sc -ta6sien -ta8stem -ta8sto -t5aufb -4taufn -8taus. -5tause -8tausf -6tausg -t5ausl -2t1b2 -2t1c -t6chu -2t1d -te2am -tea4s -te8ben -5techn -4teff -te4g3re -te6hau -2tehe -te4hel -2t1ehr -te5id. -teig5l -6teign -tei8gr -1teil -4teinh -t5einhe -4teis -t5eisen -8teiw -te8lam -te4lar -4telek -8telem -te6man -te6n5ag -ten8erw -ten5k -tens4p -ten8tro -4t3entw -8tentz -te6pli -5teppi -ter5a6b -te3ral -ter5au -8terbar -t5erbe. -6terben -8terbs -4t3erbt -t5erde. -ter5ebe -ter5ein -te8rers -terf4 -8terhö -6terklä -ter8nor -ter6re. -t8erscha -t5e6sel -te8stau -t3euro -te1xa -tex3e -8texp -tex6ta -2t1f2 -2t1g2 -2th. -th6a -5tha. -2thaa -6t1hab -6t5haf -t5hah -8thak -3thal. -6thals -6t3hand -2t1hau -1the. -3t4hea -t1heb -t5heil -t3heit -t3helf -1theo -5therap -5therf -6t5herz -1thes -1thet -5thi. -2t1hil -t3him -8thir -3this -t5hj -2th1l -2th1m -th1n -t5hob -t5hof -4tholz -6thopti -1thr6 -4ths -t1hum -1thy -4t1hä -2t1hö -t1hü -ti1a2m -ti1b -tie6fer -ti1en -ti8gerz -tig3l -ti8kin -ti5lat -1tilg -t1ind -tin4k3l -ti3spa -ti5str -5tite -ti5tr -ti8vel -ti8vr -2t1j -2t1k2 -2t1l -tl8a -2t1m8 -2t1n -3tobe -8tobj -to3cha -5tocht -8tock -tode4 -to8del -to8du -to1e -6t5o6fen -to1in -toi6r -5toll. -to8mene -t2ons -2t1ony -to4per -5topf. -6topt -to1ra -to1s -to2ste -to6ska -tos2l -2toti -to1tr -t8ou -2t1p2 -6t1q -tr6 -tra5cha -tra8far -traf5t -1trag -tra6gl -tra6gr -t3rahm -1trai -t6rans -tra3sc -tra6st -3traue -t4re. -2trec -t3rech -t8reck -6t1red -t8ree -4t1reg -3treib -4treif -8t3reis -8trepo -tre6t5r -t3rev -4t3rez -1trib -t6rick -tri6er -2trig -t8rink -tri6o5d -trizi5 -tro1a -3troc -trocke6 -troi8d -tro8man. -tro3ny -5tropf -6t5rosa -t5roß -5trub -5trup -trut5 -1träg -6t1röh -5trüb -trü3bu -t1rüc -t1rüs -2ts -ts1ab -t1sac -tsa8d -ts1ak -t6s5alt -ts1an -ts1ar -ts3auf -t3schr -t5schä -tse6e -tsee5i -tsein6s -ts3ent -ts1er -t8serf -t4serk -t8sh -5t6sik -t4s3int -ts5ort. -t5s6por -t6sprei -t1st -t2ste -t6s5tanz -ts1th -t6stit -t4s3tor -1t2sua -t2s1uf -t8sum. -t2s1u8n -t2s1ur -2t1t -tt5eif -tte6sa -tt1ha -tt8ret -tt1sc -tt8ser -tt5s6z -1tuc -tuch5a -1tu1e -6tuh -t5uhr -tu1i -tu6it -1tumh -6t5umr -1tums -8tumt -6tund -6tunf -2t1unt -tu5ra -tu6rau -tu6re. -tu4r3er -2t1v -2t1w -1ty1 -ty6a -ty8la -8tym -6ty6o -2tz -tz5al -tz1an -tz1ar -t8zec -tzeh6 -tzehn5 -t6z5ei. -t6zor -t4z3um -t6zäu -5täg -6täh -t5ält -t8än -täre8 -8tä8st -6täuß -t5öffen -8tö8k -1tön -4tüb -t6ü5ber. -5tüch -1tür. -u3al. -u5alb -u5alf -u3alh -u5alk -u3alp -u3an. -ua5na -u3and -u5ans -u5ar. -ua6th -u1au -ua1y -u2bab -ubi5er. -u6b5rit -ubs2k -u5bö -u8büb -2uc -u1che -u6ch5ec -u1chi -uch1l -uch3m -uch5n -uch1r -uch5to -ucht5re -u1chu -uch1w -uck1a -uck5in -u1d -ud4a -u1ei -u6ela -uene8 -u6ep -u1er -uer1a -ue8rerl -uer5o -u8esc -u2est -u8ev -u1fa -u2f1ei -u4f3ent -u8ferh -uf1fr -uf1l -uf1ra -uf1re -uf1rä -uf1rü -uf1s2p -uf1st -uft1s -u8gabt -u8gad -u6gap -ugeb8 -u8gn -ugo3s4 -u1ha -u1he -u1hi -uh1le -u1ho -uh1re -u1hu -uh1w -u1hä -u1hö -6ui -ui5en -u1ig -u3ins -uin8tes -u5isch. 
-u1j -6uk -u1ke -u1ki -u1kl -u8klu -u1k6n -u5ky -u1la -uld8se -u1le -ul8lac -ul6lau -ul6le6l -ul6lo -ulni8 -u1lo -ulo6i -ult6a -ult8e -u1lu -ul2vr -u1lä -u1lö -3umfan -5umlau -umo8f -um8pho -u1mu -umu8s -u5mö -u1n1a -un2al -un6at -unau2 -6und. -5undein -un4d3um -3undzw -undü8 -un8düb -une2b -un1ec -une2h -un3eis -3unfal -1unfä -5ungea -3unglü -ung2s1 -un8gä -1u2nif -un4it -un8kro -unk5s -u1no -unpa2 -uns2p -unvol4 -unvoll5 -u5os. -u1pa -u1pi -u1p2l -u1pr -up4s3t -up2t1a -u1q -u1ra -ur5abs -ura8d -ur5ah -u6rak -ur3alt -u6rana -u6r5ans -u8rap -ur5a6ri -u8ratt -u1re -ur3eig -ur8gri -u1ri -ur5ins -3urlau -urmen6 -ur8nan -u1ro -3ursac -ur8sau -ur8sei -ur4sk -3urtei -u1ru -uru5i6 -uru6r -u1ry -ur2za -ur6zä -ur5ä6m -u5rö -u1rü -urück3 -u1sa -usa4gi -u2s1ar -u2s1au -u8schec -usch5wi -u2s1ei -use8kel -u8sl -u4st3a4b -us3tau -u2s1uf -u8surn -ut1ac -u1tal -uta8m -u1tan -ut1ar -u1tas -ut1au -u1te -u8teic -u4tent -u8terf -u6terin -u4t3hei -ut5ho -ut1hu -u1ti -utine5 -uti6q -u1to -uto5c -u1tr -ut1sa -ut1s6p -ut6stro -u1tu -utz5w -u1u -u1v -uve5n -uve3r4ä -u1w -u1xe -u5ya -uy5e6 -u1yi -u2z1eh -u8zerh -u5ö -uße6n -ußen5e -8vanb -6vang -6varb -var8d -va6t5a -va8tei -va2t1r -2v1b -6v5c -6vd -1ve -6ve5g6 -ver1 -ver5b -verb8l -ve2re2 -verg8 -ve2ru8 -ve1s -ve2s3p -ve3xe -2v1f -2v1g -6v5h -vi6el -vie6w5 -vi1g4 -vi8leh -vil6le. -8vint -vi1ru -vi1tr -2v1k -2v1l -2v1m -4v5n -8vo8f -voi6le -vol8lend -vol8li -v2or1 -vo2re -vo8rin -vo2ro -2v1p -8vra -v6re -2v2s -2v1t -2v1v -4v3w -2v1z -waffe8 -wa6g5n -1wah -wah8n -wa5la -wal8din -wal6ta -wan4dr -5ware -wa8ru -war4za -1was -w5c -w1d -5wech -we6fl -1weg -we8geng -weg5h -weg3l -we2g1r -weh6r5er -5weise -weit3r -wel2t -welt3r -we6rat -8werc -5werdu -wer4fl -5werk. -wer4ka -wer8ku -wer4ta -wer8term -we2sp -we8s4tend -we8str -we8stö -wet8ta -wich6s5t -1wid -wi2dr -wiede4 -wieder5 -wik6 -wim6ma -win4d3r -5wirt -wisch5l -1wj -6wk -2w1l -8w1n -wo1c -woche6 -wol6f -wor6t5r -6ws2 -w1sk -6w5t -5wunde. -wun6gr -wu1sc -wu2t1 -6w5w -wy5a -wärme5 -wä1sc -1xag -x1ak -x3a4men -8xamt -x1an -8x1b -x1c -1xe. -x3e4g -1xen -xe1ro -x1erz -1xes -8xf -x1g -8x1h -1xi -8xid -xi8so -4xiste -x1k -6x1l -x1m -8xn -1xo -8x5o6d -8x3p2 -x1r -x1s6 -8x1t -x6tak -x8terf -x2t1h -1xu -xu1e -x5ul -6x3w -x1z -5ya. -y5an. -y5ank -y1b -y1c -y6cha -y4chia -y1d -yen6n -y5ern -y1g -y5h -y5in -y1j -y1k2 -y1lak -yl1al -yla8m -y5lax -y1le -y1lo -y5lu -y8mn -ym1p2 -y3mu -y1na -yno2d -yn1t -y1on. -y1o4p -y5ou -ypo1 -y1pr -y8ps -y1r -yri3e -yr1r2 -ys5iat -ys8ty -y1t -y3w -y1z -yä8m -z5a6b -zab5l -8za6d -1zah -za5is -4z3ak -6z1am -5zange. -8zanl -2z1ara -6z5as -z5auf -3zaun -2z1b -6z1c -6z1d -1ze -ze4dik -4z3eff -8zein -zei4ta -zei8ters -ze6la -ze8lec -zel8th -4zemp -6z5engel -zen8zin -8zergä -zer8i -ze1ro -zers8 -zerta8 -zer8tab -zer8tag -8zerz -ze8ste -zeu6gr -2z1ex -2z1f8 -z1g -4z1h -1zi -zi1en -zi5es. -4z3imp -zi1na -6z5inf -6z5inni -zin6s5er -8zinsuf -zist5r -zi5th -zi1tr -6z1j -2z1k -2z1l -2z1m -6z1n -1zo -zo6gl -4z3oh -zo1on -zor6na8 -4z1p -z5q -6z1r -2z1s8 -2z1t -z4t3end -z4t3hei -z8thi -1zu -zu3al -zu1b4 -zu1f2 -6z5uhr -zun2a -8zunem -zunf8 -8zungl -zu1o -zup8fi -zu1s8 -zu1z -2z1v -zw8 -z1wal -5zweck -zwei3s -z1wel -z1wer -z6werg -8z5wes -1zwi -zwi1s -6z1wo -1zy -2z1z -zz8a -zzi1s -1zä -1zö -6zöl. 
-zö1le -1zü -2z1ü2b -ä1a6 -äb1l -ä1che -ä3chi -äch8sc -äch8sp -ä5chu -äck5a -äd1a -äd5era -ä6d5ia -ä1e -ä5fa -äf1l -äft6s -äg1h -äg3le -ä6g5nan -äg5str -ä1he -ä1hi -äh1le -äh5ne -1ähnl -äh1re -äh5ri -äh1ru -ä1hu -äh1w -6äi -ä1isc -ä6ische -ä5ism -ä5j -ä1k -äl1c -ä1le -ä8lei -äl6schl -ämi1e -äm8n -äm8s -ä5na -5änderu -äne5i8 -äng3l -änk5l -ä1no -än6s5c -ä1pa -äp6s5c -3äq -är1c -ä1re -äre8m -5ärgern -är6gl -ä1ri -3ärmel -ä1ro -ärt6s5 -ä1ru -3ärztl -ä5rö -ä6s5chen -äsen8s -äs1th -äta8b -ä1te -äteri4 -äter5it -ä6thy -ä1ti -3ätk -ä1to -ät8schl -äts1p -ä5tu -äub1l -äu1e -1äug -äu8ga -äu5i -ä1um. -ä1us. -1äuß -ä1z -ö1b -ö1che -ö5chi -öch8s2tei -öch8str -öcht6 -5ö6dem -5öffn -ö1he -öh1l8 -öh1re -ö1hu -ö1is -ö1ke -1ö2ko -1öl. -öl6k5l -öl8pl -ö1mu -ö5na -önig6s3 -ö1no -ö5o6t -öpf3l -öp6s5c -ö1re -ör8gli -ö1ri -ör8tr -ö1ru -5österr -ö1te -ö5th -ö1ti -ö1tu -ö1v -ö1w -öwe8 -ö2z -üb6e2 -3ü4ber1 -üb1l -üb1r -5ü2bu -ü1che -ü1chi -ü8ch3l -üch6s5c -ü8ck -ück1a -ück5ers -üd1a2 -ü6deu -üdi8t -ü2d1o4 -üd5s6 -üge4l5a -üg1l -üh5a -ü1he -ü8heh -ü6h5erk -üh1le -üh1re -üh1ru -ü1hu -üh1w -ü3k -ü1le -ül4l5a -ül8lo -ül4ps -ül6s5c -ü1lu -ün8da -ün8fei -ünk5l -ün8za -ün6zw -ü5pi -ü1re -ü8rei -ür8fl -ür8fr -ür8geng -ü1ri -ü1ro -ür8sta -ü1ru -üse8n -ü8sta -ü8stes -ü3ta -ü1te -ü1ti -üt8tr -ü1tu -üt8zei -ü1v -ß1a8 -5ßa. -ß8as -ß1b8 -ß1c -ß1d -1ße -ß5ec -8ße8g -8ße8h -2ß1ei -8ßem -ß1f8 -ß1g -ß1h -1ßi -ß1k -ß1l -ß1m -ß1n -ß1o -ß1p8 -ß5q -ß1r -ß1s2 -ßst8 -ß1ta -ß1te -ßt3hei -ß1ti -ß5to -ß1tr -1ßu8 -6ß5um -ß1v -ß1w -ß1z -2s1ta. -i2s1tal -2s1tani -2s1tan. -fe2s1ta -te2s1ta -nd2ste -ve2ste -3s2tec -4s3techn -3s2teg -3s2teh -3s2tein -3s2teig -3s2teif -3s2tell -3s2telz -a4s3tel -3s2temm -3s2temp -3s2tep -s3s2ter -t3s2tern -3s2teue -6s4teuro -bs2ti -te2s3ti -ve2sti -3s2tic -3s2tieb -3s2tieg -3s2tif -3s2til -3s2tim -3s2tink -3s2titu -a2s1to -gu2s1to -ku2s1to -i2s1tol -i2s1tor -ve2s1to -2s1tung -2s7tus -o2s1tul -aus3s4 -ens3s4 -gs3s4 -.mis2s1 -s2s1b8 -s2s3chen -s2s3d -s2s5ec -2s2s1ei -s2s3f -s2s1g -s2s3h -s2s3k -s2s3l -s2s3m -s2s3n -s2s3p8 -s2s5q -s2s3r -s2s3s2 -sss2t8 -as2s3te -is2s3te -us2s3te -üs2s3te -s2st3hei -s2s3ti -s2s1to -s2s1tr -6ss5um -s2s3v -s2s3w -s2s3z -1cker. -1ckert -1ckad -1cke. 
-1ckel -1cken -4ck1ent -1ckere -1ckern -1ckeru -1ckie -1ckig -1ckun diff --git a/dist-packages/wordaxe/wordaxe/dict/hyph_de_DE.dic b/dist-packages/wordaxe/wordaxe/dict/hyph_de_DE.dic deleted file mode 100755 index 97751f811..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/hyph_de_DE.dic +++ /dev/null @@ -1,7500 +0,0 @@ -ISO8859-1 -.aa6l -.6a1ba -.ab3a4s -.a1be -.ab3ei -.a1bi2 -.ab3it -.a1b1l8 -.a1b1r -.a1b3u -.a1d -.a1do -.ad3o4r -.a2l1t -.alti6 -.a3na3c -.an5a2l1g -.a1n1e -.an2gs -.ang8s2t1 -.a6n1s -.ap1p -.a2r1s -.ar6sc -.a2r1t -.ar6ta -.8ar6tei -.as2z -.au2f1 -.au2s3 -.1be -.be5er1b -.be3na -.be2r1t -.ber6t5r6 -.1bi -.b6ie -.bie6r5 -.bi2ms -.bim6s5t -.1br -.brot3 -.br6u6s -.c8h6 -.1che6f5 -.1da8c -.da2r -.d2a1ri -.dar5in -.da1r5u -.1de -.den6ka -.de1re -.de5r6en -.de1sp2 -.des6pe -.de8s2p2o -.de3s1z -.1di -.dia3s4 -.dien4 -.dy2s1 -.e6hr -.eh1re -.ehre6n5 -.ei1ne6 -.ei6n5eh -.ei8nen -.ei6ns -.ein5sa -.e2n1d -.en1de -.en6der -.en6d5r4 -.en3k4 -.en8ta8 -.en1te -.en8tei -.en4t3r6 -.e1p2o1 -.er1b -.er1ba -.er6ban -.er1be -.er6b5ei -.erbl8 -.er6bla -.e8rd -.er1du -.er6d5um -.e1re -.2er3ei -.e1r5er -.2e1ri -.er3in -.er3o4b -.er1w -.erwi5s -.e1s1p2 -.e2s8t1l -.es8t1n -.ex1a2 -.ex3em -.fa2ls -.fal6sc -.1fe -.fe6s1t5a -.flu4g3 -.1fu -.furc8h8 -.1ga -.ga1ne -.ga6ner -.1ge -.ge3n4a -.ge5rö8 -.ge1s6 -.ha2lb5 -.hal1be6 -.hal6br -.haup4 -.hau4t -.hei1ma6 -.he4r3e -.her6za -.he5x -.hin3 -.hi2r1s -.hir8sc -.ho4c -.hu3sa -.hy5o -.i1be5 -.i1ma -.ima6ge -.in1 -.i1ni6 -.isc8h -.i1s5chi -.ja6gd5 -.1ka -.ka6l1k -.kal6k5o -.ka6ph -.ki4e -.1k2o -.kop6f3 -.1kr8 -.kraf6 -.1kü -.kü5ra -.la2b1b -.lab6br -.1li -.l8i1i -.liie6 -.lo6s5k2 -.1lös -.lö4s3t -.1ma -.ma5d2 -.1mi -.mi2t1 -.no6th -.no1to -.no6top -.o1be -.ob2e8ri -.o1b1l8 -.o8bs2 -.ob6s1t5e -.or3c -.o2r1t -.or2ts -.ort6s5e -.o1st3a -.os1te -.oste8r -.1pe -.p8er -.pe4re6 -.pe3t2s -.1ph6 -.1p2o -.po8str6 -.r6au4m3 -.re5an -.ro8q -.ru5the -.rü5b6e2 -.sc8h8 -.1se -.se6e -.se5n6h -.se5ra -.1si -.si2e -.sp2 -.1spi -.spi1k -.spi6ke -.st4 -.1sy -.sy2n1 -.1tag -.ta1ge -.tage4s5 -.tan6kl8 -.ta8th -.te6e -.te8str6 -.to1d -.to1de4 -.to6der -.t8o1ni -.to8nin -.to6we -.um1 -.u2mp -.umpf4 -.un1 -.u1ne6 -.un1ge -.ung8e5n -.ur1c -.u1re -.ur5en -.1ve -.ver1 -.v2e1ri -.ve6rin -.v2or1 -.vora8 -.1wah -.wah6l5 -.1weg -.we1ge -.we8ge1s -.we8s2t -.wes3te -.wo6r -.wor3a -.wu6n4s -.1zi -.zi4e -.1zu -.z2uc -.zuc8h8 -.ä2n1d -.än1de -.ände8re -.öc8h8 -aa1c -aa2gr4 -aa1l5e -aa6r5a -aa2r1t -a5arti -aa2s1t -aa2t2s -6a1ba -ab3a2r1t -a2b1d -1abdr4 -a1be -6ab8el -abe2n1d -aben6dr4 -ab5e6rk -ab5e2r1r -ab5e2s1se -1a2b1f8 -1a2b1g2 -a2b1h -1abhä -a1bi -ab1ir -a2b1k2 -1ab1k2o -a1bl8 -ab1la -5a6blag -a6bl8aß -ab4ler -a1b1lu -a8blä -5a6blö -a2b1m -ab1ma -abma5c -1a2b1n -a1br -ab1ra -ab1re -5a6brec -ab1ro -a8b1s -ab8s1k2 -abs2z -a2b1t -3abtei -a1bu -ab1ur -1a2b1w -a8b1z -5ab1ze -5ab1zu -a1bä -ab1än -abäu8 -a1ce -a4ce. -ac8h -a5chal -ach5a2r1t -ach5au -a1che -a8chent -ach6er. -a6ch5er1f -a1chi -ach1l6 -a6ch3m -a2ch5n6 -a1cho -ac6hr -ach3re -a1chu -a8ch1w -a1chy -ach5äf -ac2k -ac1k2o -a2c2ks -acks6t8 -ack5sta -a1d -8ad. -a6d5ac -ad3ant -ad8ar -a2d1d -5ad1di -a1de -a8dein -ade5o8 -a1di -adi5en -1ad1j -a2d1l -1adle -a1do -ad1op -adr4 -a2dre -3adres -a2dt1 -1a2d1v -a6dä -a1e2d -ae1r -a1er. 
-1aero -8afa -a3fal -af1an -a5far -a5fat -af1au -a1fe -a6fen2t1l -a2f1ex1 -a2ff -af1fr6 -afr6 -af5rau -af1re -1afri -a2ft -af1te -af6tent -af1tr6 -af6tra -aft5re -a1fu -a6f5um -8a1fä -a1ga -ag5a1be -a1ge -5a4gent -ag8er -age1s -age1s5e -a2g1g -1ag1gr4 -a1g5las -ag1lo -a1gn8 -ag2ne -a1go -1a2g8o1g -a1gu -a6g5u2n1d -a1ha -a1he -a2h5ein -a4h3er1h8 -a1hi -ahl1a -ah1le -ah1ma -ah4m3ar -ahn1a -a5ho -a6hr -ahra6 -ahr5ab -ah1re -ah8rei -ahre6n8s -ahre4s3 -ah2r1t -ahr8ti -ah1ru -a1hu -ah8ö -ai1d -ai3d2s -ai1e -a2if6 -ai6ns -a3in1se -ai1re -ai4re. -aisc8h -a5i8s2ch. -ai1s8e -ai2s1m -a3ismu -ais6n -ai1so6 -a1j -a1ka -1aka1d -a4ka1de -a1ke -a1ki -a2k1k2 -1ak1k2o -a1kr8 -5akro1 -a5lal -al5a6ns -3al8a6r1m -a2lb -al1be -al8be1b -al8ber1w -albl8 -alb5la -al1bu -3album -a8l1c -a1le -ale1b -a6l5e6be -a4l3ein -a8lel -a1ler -a8ler1b -a8ler1h8 -a6le2r1t -5a6l5eth -a2l1g -1al1gi -al4g1li -a1li -a2l3int -a6ll -al1la -al4lab -al8lan -al4l3ar -al1le -alle3g -a1lo -a4l5ob -a2ls -al1sc -alsc8h -al6s6ch2m -a2l1t -al4the -altr6 -al4t3re -8a1lu -al6u5i -a6lur -alu3ta -a1lä -a1ma -a6ma1te -a1me -8ame. -5a6mei1se -a2m1m -am1me -am6m5ei -am6mum -am2n8 -a2mp -ampf3a6 -a2ms -am1sc -amsc8h -am6s8chw -a2m1t -am2ta -a1mu -a1mä -a3na2c -a1na1d -ana1di -anadi5e -an3a1k2o -an3a6l1p -3a8na1ly -an3a1me -an3a2ra -a1nas -an5as1ti -a1nat -ana2t5s -a2n1d -an1de -an8dent -ande4s3 -a1ne -a2n1ec -a8n5eis -an1e2k -4aner. -a6n5e8rd -a8ner1f -ane6rk -a6n5er1ke -an1f -1anfa -an1fe -5anfe2r1t -1an1fä -an1ga -3angab -an1ge -ange1b -5ange1bo -an3g1li -ang6lis -an2gn8 -an1gr4 -3angri -an2g5t6 -an1h -5anhä -a1ni -ani5g -an6i1k -ani4ka -an2io -an5i8on -an1kl8 -an1k2n8 -an6kno -an1kr8 -an4kro -1an5l6 -an1m -an1ma -anma5c -anmar4 -an1na -3an1n2ah -an1ne -anne4s3 -a1no -5a6n1o2d -5a6n3o1ma -5a6no8rd -1an1r -a6ns -an1sa -an1sc -ansc8h -5anschl6 -an1so -an4so1z -an1st -an1sta -5anstal -an1s2z -an1te -5antenn -an1th -a8n1w -5anwä -a5ny -an1ze -an4z3e1d -5anzeig -an1zi -5anzieh -an1zu -3anzug -an1ä -5anäs -a1nö -anö8d -a1os -a1pa -ap1fe -3apfel -a1ph -a2ph1t2 -aph5ä6 -a1pi -8apl8 -a1p2o -apo1c -apo1s -a6pos2t -a6po1th -1ap1pa -ap1pr6 -a1pr6 -a5pä -a3pü -a1ra -a4r3af -a2r3a6ll -ar1b -ar1be -3arbei -2ar2b1t -ar1c -2a1re -ar3ein -a8r1g -ar2gl -2a1ri -ar2ie -ari5es -a6rk -ar1ke -ar8ke2r1s -ar1l -ar6les -a2rn -ar1na -ar4nan -ar5o6c8h -ar1o2d -a1rol -ar2on -ar3ony -a8ror -a3ros -ar5ox -a2r1s -arsc8h -ar6schl6 -a2r1t -8ar5tei -artr6 -ar6t5ri -a1ru -a1ry -1ar2z1t -arz1w8 -ar8zä -arä8m -arö6 -ar5öm -ar1ü2 -a1sa -asc8h -a1sche -a6schec -asch5l6 -as6ch3m -a6s2chn6 -as1h -a3s4hi -asp2 -as1pa -asp5l8 -as1te -as5te1v -1a2sth -a1str6 -ast3re -8a1ta -ata5c -ata3la -a6tapf -at8a5pl8 -a1te -a6te1li -aten5a -ate5ran -6a2t1f2 -6a2t1g2 -a1th -ath6a -at3hal -1a2th1l -2a1ti -a2t1l -atl8a -5atlant -3atlas -a2t1m8 -8atmus -6a2t1n -a1to -a6t5ops -ato6ra -ato2r1t -a6t5ort. -4a1tr6 -a6t5ru -a2t1t -at2t1h -at5t6hä -6a1tu -a2tz -atz1w8 -a1tä -a1tü -au1a -au1br -au6bre -a2uc -auc8h -auch3a -au1e -aue4l -au1fe -5au4f3ent -au2ff -3auf1fü -au2f1g2 -3auf1ga -1au2f1n -au2f1t -3auf1tr6 -1au2f1w -au1ge -3auge. -a6uk -au1kl8 -au4kle -au1le -aule8s -6aum -au1ma -au8mar -au2m5p -1au2s1b2 -3au2s1d -1au2s1f2 -1au2s3g2 -au1si -au8sin -au4sta -1au2s1w -1aus1z -au1te -aut5eng -au1th -1au1to -au1ße8 -a1v -a1ve -aver1 -ave5r6a -av2e1r6i -a1w -a6wes -a1x -a1xi -a2xia -a6x2io -a1ya -a1z -a1zi -azi5er. -8aß -1ba -ba1d -ba1de -8ba8del -ba1la -ba1na -ban6k5r8 -ba5ot -ba8rd -bar1di -bardi6n -ba1ro -bas1te -basten6 -bau3sp2 -2b1b -bbl8 -bb6le -b2b1li -2b1c -2b1d -1be -be1a -be8at. 
-be1c8h -8be2cht -bec2k -8be1cke. -be5el -be1en -bee3r4e -be2e8rei -be5e1ta -be1f2 -8be2ff -be1g2 -be1hö8 -bei1s -bei1se -6b5eisen -bei3tr6 -b8el -b4e1l8o -be1lu -belu3t2 -be3na2c -be2n1d -ben1d6o -be1ne -be6n5e2r1s -be6n5er1w -be1no -be4nor -be6ns -ben4se6 -bens5el -be1nä -be1nü -be1o2 -b8er. -be1ra -be8rac -be8r1g -ber1ga -ber8gab. -be2r1r -be1rü -be1s8c -be1se -bes5er1h8 -be1s2p2 -be5th6a -be2ts -bet5sc -be1un -be1ur -8bex -be1z -bezw8 -be6zwec -2b1f8 -2b1g2 -b1ga -bga2s5 -b1ge1 -2b1h -bho1le6 -1bi -bi1bl8 -b6ie -bi1el -b2il -bi1la -bi1lä5 -bi1na -bi1no -bi4no1k -bi6stu -bi5tr6 -bi2t1t -bit4t5r6 -b1j -2b1k2 -b1kü6 -bl8 -b6la. -6b1la1d -6blag -8blam -1blat -b8la2t1t -3blau. -b6la1v -3ble. -b1le1b -b1le1d -8b1leg -8b1leh -8blei1d -8bleih -6b3lein -bl4e4m3o2 -b1li -4blic8h -b4li2n1d -8bling -b2l2io -5blit -b4li2tz -b1loh -8b1los -1blu -5blum -2b1lun -blut3a -blu2ts -blut5sc -3blä -blä6s5c -5blö -3blü -blü8sc -2b1m -2b1n -1bo -bo1c8h -bo1d -bo5d6s -boe5 -8bo2ff -8bon1k -bo1ra -b1o2r1t -2b1p2 -b1q -1br -bra2il6 -brast8 -bre4a -b5re1d -8bre1f -br2ie -8b5riem -b1r2ig -b6ri1ga -bro1s -b1rup -b2ruz -8bröh -brö2s5c -8bs -b1sa -b8sang -b2s1ar -b1sc -b1se -bs3er1l -bs3erz -b1so -b8sof -b1s2p2 -b2st1h -bstr6 -b3stru -b5stä -b1su -b6sun -2b1t -b2t1h -1bu -b6ui -bu1ie -bu6l6k -b8u1re -bu1si -bu6sin -6b1v -2b1w -1by1 -by1t -by6te. -8b1z -1bä -b5ä6s5 -1bü -bü1b6e2 -b3ü4ber1 -b6ü5be1re -bü1ge6 -büge1l5e -bü2r1s -bür6sc -1ca -cag6 -ca5la -c2a6re -ca5y -c1c -1ce -ce1li -celi4c -celic8h5 -ce1ro -c8h -2ch. -1c8hae -ch1ah -ch3a2k1t -cha1me -cha6mer -8c6hanz -5cha1ra -3ch2a1ri -5cha1to -6ch1b2 -1che1f -6chei -ch3e2il -ch3eis -che6rk -6cher1kl8 -6ch1f2 -4ch1h -5chia1d -5chias -6chi6ns -8ch1j -chl6 -5ch4lor -6ch2m -2chn6 -ch1ni -ch8n6ie -5cho. -8chob -c2hoi -choi8d -6ch1p -c6hr -ch3ren -ch6res -ch3rü -2chs -2cht -cht5h6a -cht3hi -5chth2on -ch1ti -ch6tin -6chuh -chu4la -6ch3unt -chu2t6t -8chw -1ci -ci5tr6 -c2k -2ck. -ck1ei -4c2k1h -ck3l8 -ck3n8 -c1k2o -ck5o8f -c1k1r8 -2c2ks -ckst8 -ckstr6 -ck5stra -ck6s5u -c2l -1c8o -con6ne -8cor1b -cos6t -c3q -1c6r -8c1t -1cu -1cy -5cä1 -cö5 -1da. -8daas -2d1a2b1g2 -8da1br -6da2b1t -6d1a2b1w -1dac -da2gr4 -6d5a6l1k -8d5a2m1t -dan1c -dan1ce -dan6ce. -da1ni -dan6ie -dani5er -dan1ke -dan8ker -2d1an5l6 -danla6 -6da6ns -8dan1zi -6dan1zu -d1ap -da2r1a8 -2d1ar1b -d3ar1c -da6r1m -dar1me -dar6men -4d3a2r1t -8darz -1dat -8da2t1m8 -2d1auf -2d1aus -2d1b -2d1c -2d1d -d5de -d3d2h -d1dä -ddä2m1m -ddäm1me8 -1de -2deal -de5an -dec8h -de3cha -de1e -de1f -de1fe6 -6de2ff -2d1e6hr -5d4eic -de5isc -de1la -de8lar -de2ls -del6s5e -del1sp2 -del6spr6 -de1ma -de4mag -de1mu -de8mun -de1ne -de8nep -dene6r -den1ge -8denge. -8deng8en -de5o6d -2deol -de5ram -de8rd -8der2d1b -de1re -d2erei -der5ein -de1ro -de2r1r -d8e2r1s -d2e1ru -der5um -de1sa -de4s3am -de4s3an -de4sau -de1si -de6s2il -de4sin -de1so -de8sor -de1sp2 -de4spr6 -de2su -8deul -de5us. -2d1f -df2l -2d1g -2d1h -1di -dia5c -di5a1ra -di1ce5 -dic8h -di3c6hr -di5ena -di1gn8 -d2il -di1la -di2l8s -di1na -8di2n1d -6din1f -4d3in1h -2d1i6ns -d2io -di5o6d -d2ip -di3p4t -di1se -di8sen -dis1p2 -dis1pe -di5s8p8er -di6s5to -dis3tr6 -di1ta -di8tan -di1ti -di8tin -d1j -6dje -2dju -2d1k -2d1l -2d1m -2d1n6 -d1ni6 -dn1j -dnje6 -1do -6d5o1be -do6ber1f -6d5ony -do3ran -6do8rd -2d1o8r1g -do2r1t -dor4t3h -6do1th -do2t1t -dott8e -2d1p -d5q -dr4 -1drah -8drak -d5r6a2n1d -6d1re. -4drec8h -d6rec2k -4d3reg -8d3reic -dre2if -d5rei1fe -8drem -8d1ren -2d1rer -8dres. 
-6d5r1h8 -1dria -d1ric -8dri2n1d -droi6 -dro5x -1dru -8drut -drö2s5c -1drü -drü5b -drü8sc -2ds -d1sa -d6san -d1sat6 -d1sc -dsc8h -5d6scha. -d1schi -5dschi1k -d1se -dse8e -d8se8r1g -8ds1l -d1sp2 -d4s1pak -ds2p2o -d8spä -d1st -d1sü -2dt -d1ta -d1te -d1ti -d1to -d2t1s6 -d1tu -d5tä -1du -du5a2ls -du1b6 -du1e -du2ft -duf4t3r6 -4d3uh -d6ui -du5ie -8du2m1l -8du2m1w -2d1u2n1d -du8ni -6d5unt -dur2c -durc8h3 -6dur1l -du2r1s -6dursa -8du2r1t -dus1t -dusc8h -du8sc6hr -2d1v -2d1w -dwa8l -2d1z -1dä -6däh -8dä2n1d -dä6r -dö1b -dö8bl8 -d5öl -dör1f -dör6fl -dö8sc -d5ö4st -1dü -ea1be -ea4ben -e1ac -e1ah -e1a2k1t -e1al. -e5al1f -e1a2l1g -ea1li -e5a8lin -e1a6l1k -e1a6ll -e5a6l1p -e1a2l1t -e5a2l1w -e1am -e1a2n1d -ea1ni -ea6nim -e1ar. -e5ar1f -e1a6rk -e5a6r1m -e3a2r1t -e5at. -e6a1te -e6a5t6l -e8a2ts -e5a2t1t -e6au. -e1aus -e1b -e1ba -e6b5am -e1be -ebe6ns -eben4s5e6 -ebl8 -eb1li -eb4lie -e8bs -eb1se -eb4ser -eb1si -eb4s3in -ec8h -e1che -e8cherz -e1chi -e6ch3m -8e2ch3n6 -ec6h1r -e2chs -ech1se -ech8se2n1d -ech4su -e1chu -ec2k -ec1ka -eck5an -e5c2l -e1d -ee5a -ee3e -ee5g -e1ei -ee5isc -eei4s3t -ee1le -ee6le2n1d -e1e6ll -ee5lö -e1e8rd -ee3r4e -ee8reng -eere6s5 -ee5rä -ee1ta -ee6tat -e1ex -e1f -e6fau -e1fe -e8fe8b -e2ff -ef5fe -3effek -efr6 -ef3rom -e1ge -ege6ra -eg1los -eglo6si -1egy -e1ha -e6h5ac8h -eh5a6ns -e6hap -eh5auf -e1he -e1hi -ehl3a -eh1le -eh3l5ein -eh1mu -eh1ne -eh2n5ec -e1ho -e6hr -ehr1a -eh1re -ehre6n -eh1ri -eh1ru -eh6r5um -e1hu -eh1w -e1hy -e1hä -e1hö -e3hüt -ei1a -eia6s -ei1ba -ei6bar -eic8h -eich3a -eic6h5r -ei1d -ei4dar -ei1de -ei6d5ei -ei8der1f -ei2ds -ei3d4sc -ei1e -e2if -ei1fe -8eifen -eifr6 -3eifri -1eign8 -e2il -ei6l1d -ei1ma -ei6mab -ei8mag -ein1a4 -ei8nat -ei1ne -ei8ner1h8 -ei8ness -ei6ne1te -ein1g -e8i1ni -ein1k -ei1no -ei6n5o1d -ei8no1k -ei4nor -ei6ns -e3in1sä -e2i1o -e1i2rr -ei5ru -ei1sa -ei8sab -eisc8h -ei5s2chn6 -ei1se -ei6s5ent -ei1so -ei8sol -ei1ta -ei4t3al -eit3ar -eit1h -ei6thi -ei8tho -ei2ts -eit1sam -eit8sa2m1t -ei1tu -ei6t5um -e1j -1e4k1d -e1ke -e1ki -e1k2l8 -e1kn8 -ek2ni4 -e1la -e2l1al -6elan -e6lan1f -e8l1an5l6 -e6l5a6ns -el3ar1b -el3a6r1m -e6l3a2r1t -5e6las1ti -e6lau1ge -e2lb -el8bs -elb1st5a -e1le -6ele1f -ele6h -e6l5e1he -e8le2if -e6l5ein1h -1elek -e8lel -3ele1me -e6lemen -e6len1te -e6l5e1pi -e1ler -e4l3e2rr -ele2r1s -e6l5ersc -el1f -elf2l -e2l1g2 -e1li -e6l5i6ns -e6ll -el1le -el1l8er -4e1lo -e4l3o1fe -e2ls -el1so -el8soh -e2l1t -el5ten -el8tent -5elte2rn -e1lu -elut2 -e1lä -e1lü -e2m1d -em1de -em8dei -e2m1m -em1me -em8meis -4e1mo -emo5s -e2mp -1emp1f -1emp1t -e2m1t -1emto -e1mu -emu6rk4 -emur2ks5 -e1mä -ena1be -en5a6ben -ena2c -enac8h -en5a2chs -en5ac2k -e1na1d -en5af -en5a6ll -en3a2l1t -en1am -en3an. -en3ant -en3anz -en1a6p -en1ar -en1a6s -6e1nat -e4n3auf -en3aus -en1c -en2ce -e2n1d -enda6l -en1de -end5er1f -end5e8r1g -en8dess -e1ne -4ene. -e2nec -en5ec2k -ene1f -e8ne2ff -e6n5e6hr -e6n5eim -e8n3eis -6enem. -6enen -e4nent -4ener. -e8ne8rd -e6n3er1f -e4ne8r1g -5ener1gi -ener1l -e6n5erla -en5e2r1s -e6nerst -en5er1w -6enes -e6n5ess -e2nex -en3glo -2e1ni -en1ni -enn6i6s5 -en1no -enn6os4 -en6ns8 -e1no -eno1be -e6nober -eno8f -e6n5opf -e4n3o8rd -e6ns -en1se -en8se2r1s -ens1k2 -ens8kl8 -en1sp2 -ens1p2o -ens6por -en1ta -en5t6ag -en4t3a5go -en1te -enter1b -en8ter1bu -en1ti -en6ti1d -en2t1l -3entl8a -en1tr6 -ent5ric -en2t1w -5entwic -5entwu -1en2t1z -e1nu -en6u5i -e3ny -en1za -en8zan -en1öf -e1nös -e1nüg -eo1c -e5o6fe -eo1k -e5o2k1k2 -e1on. -e3on1f -e5on1k -e5on5l6 -e5on1r -e5opf -e5ops -e5or. 
-e1o8rd -e1o8r1g -eo5r6h8 -eo1t -e1pa -e1pe -e8pee -e6p5e6g -ep5ent -e1p2f -e1pi -5epi1d -epi1de -e6pidem -e1pl8 -e1p2o -5epos -e6pos. -ep4p3a -e1pr6 -e1pä -e1q -e1ra. -er5aal -8er6a1ba -era1be -e5r6ab8el -er5a6ben -e5ra1bi -er3a8b1s -er3ac8h -era5e -era5k6l8 -e2r3a6ll -er3a2m1t -e3r6a2n1d -e3ra1ne -er3a6ns -e5r8anz. -e1rap -er3ar1c -e3r2a1ri -er3a6si -e1rat -era2t3s -er3auf -e3r6aum -er1b -er8bs -3erb1se -er1c -e1re -4e5re. -er3ec2k -er5e2g1g -e2r5e2h -2erei -e3r4ei. -e8rei1ne -e6r5ein1r -6eren. -e4r3en1m -e1rer -4erer. -e6r5e6r1m -er5er5o -ere2r1s -er5erst -e4r3erz -e2r3ess -er1f -er1fü -5erfül -e8r1g -er1ga -er8gan. -er1ge -erge1b -5erge2b1n -er2g5h -er1gä -5ergänz -er1h8 -5erhö1hu -2e1ri -eri5ak -e6r5iat -e4r3i2n1d -e6r5i6n5i6 -er5i6ns -e6r5int -er2it -eri1ti -er5it2io -e6rk -er1kl8 -3er1klä -er1l -er1lös -5erlös. -e6r1m -er1me -erme6n6s -e2rn -er1na -er6nab -er6ns -3ernst -6e1ro. -e1ro1d -er1o2f -e1r8o1g -6e3roi -ero5i1d -ero8i1de -e3rol -e1rom -e1r2on -e3rop8 -e2r1or -e1ros -e1rot -er5ox -e2r1s -ersc8h4 -er1sta -5erstat -e2r1t -er5tei -er6t5ein -er2t1h -er5t6her -2e1ru -e1r4uf -eru2f4s3 -e4r3u6hr -e2r3u2ms -e5r6us -er1w -5erwer1b -e1ry -erzw8 -er5zwa -er3zwu -erä8m -er5äs -erö8 -e3rös. -e6r1ü2b -e1sa -esa8b -e8s1ap -e6s5a6v -e1sc -esc8h -esch4l6 -e1se -ese1a -ese1b -es5e1be -eser1v -eser1ve5 -e8s1h -e1si -es2il -es5i6ll -es3int -es1k2 -es1k2o -es4kop -e2s1l -e1so -eso8b -e1sp2 -es1pe -es4pei6s5 -es2p2o -es2pu -e2s1se -5essenz -e1sta -e6s4ta8b1s -e6staf -e6st5ak -est3ar -e1sto -e8stob -e1str6 -e6st5res -e1su -es3ur -e2s1z -e1sü -e1ta -e1t8ag -e1t2a1ri -etar2i5e -et8a8ta -e1te -eten6te -eth6a -et5hal -e5thel -e1ti -1e2t1n -e1to -e1tr6 -e2t3rec -e2ts -etsc8h -e8tscha -et8se -e2t1t -et6tei -et2th -et2t1r6 -e1tu -etu1s -e2tz -et1ze -et8zent -et8zw8 -e1tä -e1tö -e1tü -eu1a2 -eu1e -eu1er -eue1re -eu2e8rei -eu5fe -e6ui -euin5 -e6uk2 -e1um. -eu1ni -eu6n2io -eun1te -e5unter -eu1o6 -eu5p -eu1ro -3europ -eu1sp2 -eu5str6 -eu8zo -e1v -eva2l6s -e1ve -ever1 -eve2re2 -eve5r6en -ev2e1r4i -e1w -e2wig -e1xo -ex1or -1e8x3p2 -e8x1t -1extr6 -ey3er. -e1z -e1ä2 -e5ö8 -e1ü -e1ße -e8ßes -fac8h -fa6ch5i -fa1d -fa1de8 -fa6del -fa5el. -fa6ll -fal6lo -fa2l1t -falt8e -fa1na -fan4gr4 -6f1an5l6 -6fap -far1b -far6ba -far4bl8 -fa2rr -far6r5a -2f1a2r1t -fa1sc -fau8str6 -fa3y -2f1b2 -6f1c -2f1d -1fe -2f1ec2k -fe1d -fe6dr4 -feh1le -feh6lei -f6eim -8fei6ns -f5eis -fe1le -fel5en -fe2l1t -8f5elte2rn -8fe2mp -fe5rant -fe8rd -4ferd. -fe2rr -fer1ri8 -fe1sto -fe8stof -fe6str6 -fe1stu -fe6stum -fe1ta -fe8t8ag -fe2t1t -fet6ta -fex1 -2ff -f1fa -f6f5a6r1m -f5fe -ffe5in -ffe6la -ffe1le -ffe8ler -2f2f1f -f1fla -ffl8e -ff3lei -ff1li -ff4lie -f2fs -ff8sa -ff6s5ta -2f1g2 -f1ge -fge1w -fgewen6 -4f1h -1fi -fi1d4 -fi3ds -fie1b4 -f2il -fi1la -fi1le -fi8lei -fi2l1m -fil4m5a -f8in. -fi1na -8fin1f -fisc8h -fi8scho -fi6u -6f1j -2f1k2 -f8lanz -fl8e -4f3lein -f1li -8flib -4fling -f2lix -6f3l2on -5flop -1flor -5f8läc -3flöt -2f1m -2f1n -1fo -foh1 -f2on -fo6na -2f1op -fo5ra -fo6r1m -for1me -for8mei -fo2r1s -for8str6 -fo2r1t -for8th -for6t5r6 -fo5ru -fo2t1t -6f5otte -2f1p8 -f1q -fr6 -f5ram -1f8ran -f8r8aß -f8re. -frei1 -5fr4ei. 
-f3reic -f3rest -f1rib -8f1ric -6f1r2ig -1fr2is -fr2on -fro8na -fräs5t -2fs -f1sc -f1se -f2s1er -f5str6 -f1stä -fs3tät -2ft -f1tak -f1te -ft5e6h -fte1re6 -ft1h -f1ti -f5to -f1tr6 -ft5ra1d -f2ts -ft1sc -ft2so -f1tu -f2t1w -ft1wi3d4 -f2t1z -1fu -6f5u2ms -6fun1f -fun4ka -fu1ße -fuße6n -fu8ße2n1d -6f1v -2f1w -2f1z -1fä -fä1c -8fä6r1m -6f1äug -fä8ß -fö1de3 -8föf -3för -1fü -fün1f -fün4f3u -1ga -ga6bl8 -6g1a2b1w -8ga8b1z -ga1d -ga1de -g3a4der -ga8ho -ga5isc -4gak -ga1la -6g5a2m1t -ga1na -ga1ne -gan5er1b -1gan6g5a -ga5n1j -6g1an5l6 -ga6ns -8gan1sc -6gar1b -2g1ar1c -2g1a6r1m -ga5ro -ga2r1t -6g3arti -ga8sa -ga8sc -ga1str6 -ga6st3re -2g1a2t1m8 -6g5auf -gau5fr6 -g5aus -2g1b -g5c -6gd -g1da -1ge -ge1a2 -ge6an -ge8at. -ge1e2 -ge6es -ge1f2 -8ge2ff -ge1g2l -ge1im -4g3ei1se -geist5r6 -ge2lb -gel1br -gel8bra -ge2l1t -gel2t8s -ge5lö -g2e1ni -ge8nin -gen3k -6g5en2t1f2 -ge3nä -ge1or -ge1ra -ge6rab -ger8au -ger1h8 -8gerhö -g2e1ri -ger8i6ns -ge1ro -6g5erz. -ge1rä -ge1rü -ge1s -ge1s2p2 -ges1te -ge2s7te. -ge2s7ten -ge2s7ter -ges1ti -ge2s7ti1k -ge5unt -4g3ex3 -2g1f8 -2g1g -g1ha -6g1hei -5ghel. -g5henn -6g1hi -g1ho -1g6hr -g1hö -1gi -g2il -gi5la -gi1me -gi8me. -gi1na -4g3i6ns -gis1tr6 -g1j -2g1k -8gl. -1gla1d -g5lag -glan4z3 -1glas -6glass -5glaub -g3lauf -1gle. -g5le1b -3gleic -g3lein -5gleis -1glem -2g1ler -8g3leu -g1li -gli8a -g2lie -3glie1d -1g2li1k -1g2lim -g6l2io -1gloa -5glom -1gl2on -1glop -g1los -g4loss -g5luf -1g2ly -1glü -2g1m -gn8 -6gn. -1gna -gna2c -8gnac8h -2g1n2ah -g1nas -g1ne -g8neu -g1ni -g2n6ie -g3n6is -1gno -8gnot -1go -goe1 -8gof -2g8o1g -5go1gr4 -6g5oh -g8o1ni -gon6i5e -gon6is -6gonist -go1ra -8go8rd -2g1p2 -g1q -1gr4 -g5rahm -gra8m -gra4s3t -6g1rec -gre6ge -4g3reic -g5reit -8grenn -gr2i4e -g5riem -5gr2if -2g1r2ig -g5ring -6groh -2grot -gro6ß -4grut -2gs -gs1ab -g5sah -gs1ak -gs1an -g1s8a2n1d -gs1ar -gs1au -g1sc -g1se -gs1e1f -g5s8e2il -gs5ein -g2s1er -g1si -gs1in -g2s1o -gso2r -gsp2 -gs1pr6 -g2s1u -2g1t -g3te -g2t1h -1gu -gu5as -gu2e -2gue. -6gue1d -4g3uh -8gu2ms -6g5unt -gut3h -gu2tu -4g1v -2g1w -gy1n -g1z -1gä -8gä8m -6gä6r1m -1gö -1gü -6güb -1haa -ha1b8r -ha1d -ha1de -ha8del -hade4n -8hae -ha5el. -ha2ft -haf6tr6 -2hal. -ha1la -ha2lb -hal4b5a -6ha1le -8han. -ha1na -ha2n1d -han6dr4 -han1ge -han6ge. -2ha1ni -h5an1th -6hanz -6har1b -h3ar1be -ha6r1m -h3ar1me -ha5ro -ha2t1h -h1a2t1m8 -hau1sa -hau6san -h8a8ß -h1b2 -h1c -h1d -he1b -he2bl8 -hec8h -he3cho -h3e2cht -he1d -he5d6s -he1f -5he2ft -he1he -h5e6he. -hei1d -hei8ds -h1e2if -2hein -he3i2s1m -he5i2st. -hei2t8s3 -he2k1t -hek6ta -he6ll -hel1la -hel8lau -8he2l1t -he1me -he6mer -1he2m1m -6h1e2mp -he1ne -h6enen -hen5e2n1d -henkl8 -hen5klo -hen1tr6 -hen6tri -he2nu -8heo -he8q -her3ab -he5rak -her3an -4he1rap -her3au -her1b -h3er1bi -he1ro -he8ro8b -h2e1ru -he4r3um -her1ze -her6z5er -he1sp2 -he4s1pe -he1st -he1ta6 -het5am -he5th -heu3sc -he1xa -hey5e -h1f2 -h1g -h1go -hgol8 -h1h -h1iat -hi2e6r5i -hi1k -hi5k1t -h2il -hil1a2 -hil1f -hil4fr6 -hi5nak -hin4ta -hi2nu -h2io -hi5ob -hi2rn -hir5n5e -hir6ner -hi1sp2 -hi1th -hi5tr6 -5hi2tz -h1j -h6jo -h1k2 -hla2b1b4 -hla4ga -hla6gr4 -h5lai -hl8am -h1las -h1l8aß -h8l1c -h1le1d -h3lein -h1ler -h5l6er. -h1li -h2l2if -h2lim -h8lin1f -h2l5int -h2l2ip -h2lit -h4lor -h3l6o1se -h1läs -h1me -hme5e -h1ne -h2nee -h2nei -hn3eig -h2nel -hne8n -hne4p3f -h2n8erz -h6ne2tz -h1ni -h2n2ip -h2nit -h1nol -h6ns -hn5sp2 -h1nu -h2n2uc -h2nu1d -h2nul -hoc8h1 -1hoh -hoh1le -hoh8lei -2hoi -ho1la -ho4l3ar -1holz -h2on -ho1ra -6ho8r1g -ho2rn -5horn. 
-ho3s1l -hos1p2 -ho4spi -h1p -hpi6 -h1q -6hr -h1rai -h8rank -h5r6aum -hr1c -hr1c6r -hrcre8 -h1re1d -h3reg -h8r4ei. -h1rer -h4r3er1b -h8re2r1t -h8r1g2 -h1ric -hr5i6ns -h2rom -h2r1t -hr6t5er1l -hr2t1h -hrtr6 -hr6t5ra -hr8tri -h6rum -hr1z -hs3ac8h -h1sam -h6s5a2m1t -h1sc -h1se -h6s5ec -h6s5er1l -hs8erle -h1so -h4sob -h1sp2 -h8sp8aß -hs1pe -h8spel -hs6p2o -hs1pu -h4spun -h1str6 -h1stu -h4s3tum -h1su -hs3u2n1d -h1sü -h5ta. -h5tab -ht3ac -ht1ak -ht3ang -h5tanz -ht1ar -ht1at -h5taub -h1te -h2t1ec -hte1f -h4t3e2ff -h2t3e1he -h4t3e2if -h8teim -h4t3ein -h4t3eis -h6te2mp -h8ten2t1f2 -hte1re -hte8ren -hter1f4 -h6ter1fü -hte8r1g -h8ter1gr4 -h4t3er1h8 -hte2r1s -h6t5ersc -h8terst -h8te1se -h8tess -h2t1eu -h4t3ex -ht1he -ht5hu -h1ti -htr6 -ht5rak -h2ts -hts3ah -ht1sc -ht1se -ht6sex -ht8s1k2 -ht8so -h1tu -h2tz8 -h5tüm -hub5l8 -hu6b5r -huh1l -hu6hr -h5uhr. -hu6ld -huld5a6 -hu1le -hu8lent -hu8lä -h5up. -h1v -h5weib -h3weis -h1z -hä1k -hä8kl8 -hä2l8s -hä1ma -häm6a8tu8 -häsc8h -hä1sche -hä8sche. -hä2t1s -häu4s3c -2hö. -2höe -8höi -hö6s -hös5c -hüh1ne6 -hü2ls -hül4s3t -hü2t1t -hütte8re -ia1d -i5a2d1n6 -i1af -i5ak. -i1al. -i1al1a -i1a2lb -i1a6ld -ia1le -i5alei -i1al1f -i1a2l1g -i3a6l1h -i1a6l1k -i1a6ll -i1a6l1p -i1a8l1r -i1a2ls -i1a2l1t -i1al1v -i5a2l1w -i3alz -i1an. -ia5na -i3a2n1d -ia1n8e -ia8ne8b -i1ang -i3ank -i5ann -i1ant -i1anz -i6a1p2o -i1ar. -ia1ra -ia6rab -i5a2rr -i1as. -i1a2s1m -i1ass -i5a2st. -i1at. -i5a2ts -i1au -ia1z -i5a2z1z -i1be -i6b5eig -i6b5ei1s -ibl8 -ib2le -ib1li -i4blis -i1br -i6b1r2ig -i1bu -i6b5unt -i1bü -i6büb -ic8h -i1che -i6ch5ei -i6cher1b -i1chi -i6ch5i6ns -ich1l6 -i6ch3m -i2ch1n6 -i1cho -i2cht -icht5an -icht3r6 -i1chu -i8ch1w -ic2k -i2c2ks -ickst8 -ick6s5te -ic5l -i1d -id3a6r1m -i1de -3i2deal -ide8na -3i2deol -ide5rö8 -i1di -id2io -i6diot -idr4 -id5rec -i2d1t -ie1a -ie1b -ie1ba -ie6b5ar -ie1be -iebe4s3 -ie2bl8 -ie1b1r -ie8bra -ie4bre -ie8bä -ie1d -ie2dr4 -ie1e8 -ie1f -ie6f5a1d -ie2f5f -ie2f1l -iefr6 -ie4fro -ie2f1t -i1ei -ie1le -ie4l3ec -ie8lei -i1e4lek -i3e6ll -i1en. -i1e2n1d -ie1n6e -i3en1f -i5enn -ien1ne -ien6ne. -i1en1p2 -i1en1r -ie6ns -i5ensa -ien1sta -ien8stal -i5en1v -i1enz -ie5o -ier3a4b -ie4rap -i2e1re -ie4rec -i2erei -ie6r5ein -ie6r5eis -ie1r8er -ie2rn -i3ern. -i2e1ru -ie8rum -ie8ru2n1d -ie1sc -iesc8h -ie6s5che -ie1ta -ie6tau -ie1te -ie8te2r1t -ie5the -ie1tr6 -ie6t5ri -i1e2t1t -ie5un -iex5 -2if -i1fa -if5ang -i6fau -i2ff -if1fr6 -if5lac -if1li -i5f6lie -ifr6 -i1fre -i2ft -ift5a -if6t5r6 -i1ga -ig3a2r1t -2i1ge -ige1s -i8gess -ig5he -i5gla -ign8 -ig2ni -i5go -i1gr4 -i2g3rot -i2gs -ig3s2p2 -i1ha -i8ham -i8ha6ns -i1he -i1hi -ih1n -i6h1r -i1hu -i8hum -ih1w -8i1i -ii2s -ii2t -i1j -i1k -i1ka -i6kak -i8kerz -i6kes -ikl8 -ik4ler -i1ku -i6k5unt -2il -i5lac -i1lag -il3a6ns -i5las -i1lau -il6auf -i1le -ile8h -i8lel -il1f -il2fl -i1li -il2ip -i4l3ipp -i6ll -il1le -il6l5enn -i1lo -i2l1t -ilt8e -i1lu -i1lä -i1ma -i8ma2r1t -i2m1b2 -i1me -i8me1le -i1mi -i8mi1d -i2m1m -im1me -imme6l5a -i1mu -i1mä -i5mö -i1n2ah -ina5he -i1nat -in1au -inau8s -i2n1d -8ind. -in4d3an -in1de -5index -ind2r4 -in1du -3indus -i1ne -i5nec -i2n1ei -i8ner1w -in1f -in1fe -3infek -1in1fo -in1ge -ing8en -5ing2e1ni -in2gs -ing5s6o -in1h -5inhab -i1ni -in6ie -ini5er. -5in1j -in1kä -in8kät -in1na -in8nan -i1no -inoi8d -ino1k -in3o4ku -i6ns -in5sau -in1sp2 -5in1s2pe -ins1ti -5instit -in1str6 -5instru -ins1z -ins4ze -in1te -5inte1re -5inter1v -in3the -in5t2r6 -i5ny -inä2 -i1när -in1äs -inö8 -in5öd -i1nös -2io -io1a8 -io1c -io1d -io1de4 -io2di -ioi8 -i1ol. -i1om. -i1on. -i5on1b2 -i1o6n2s1 -i1ont -i5ops -i5o8p1t -i1or. 
-i3oral -io3rat -i5orc -i1os. -i1ot. -i1o8x -2ip -i1pa -i1pi -i1p2l8 -i1pr6 -i1q -i1ra -ir1b -ir6bl8 -i1re -i1ri -i6r1m -ir1me -ir8me8d -ir2m1o2 -i2rn -ir1na -ir8nak -i1ro -i2rr -irr1h8 -ir5rho -i2r1s -irsc8h -ir6schl6 -ir6sc6h5r -i5r6us -i5ry -i5rä -i1sa -i1sam -i8sa2m1t -i6sar -i2s1au -isc8h -i1sche -i8scheh -i8s6chei -is6ch5m -isc6h3r -ischä8 -i1se -is8e1le -ise3ra -i4s3er1h8 -is3e2rr -i1si -isi1d -isi6de -i8si2n1d -is1k2 -is1k2o -is4kop -i1so -is2on -iso1n5e -isp2 -is1p2o -is6por -i1stu -i8s5tum -i5s1ty1 -i5sö -i1ta -i4t5ab. -i2t1a2m -i8ta1x -i1te -ite2r1s -i8tersc -i1thi -i1tho -i5t6hr6 -i4t8hä -i1ti -i8ti8d -iti1k -iti6kl8 -i2t1m8 -it1me -itmen4 -i1to -i8tof -itr6 -it3ran -it3rau -i1tri -itr2i5o -i2ts -it1sc -it2se -itsp2 -it5spa -i2t1t -ittr6 -it8tru -i1tu -i2tz -it1ze -it6z5e8r1g -it6z1w8 -i1tä -itä6r5e8 -ität2 -itä2ts5 -i1tü -i1u -iu6r -2i1v -i6va1d -iv2a1ti -iva8tin -i1ve -i8vei -i6v5e1ne -iver1 -i8ver1h8 -i2vob -i8vur -i1w -iwi2 -i5xa -i1xe -i1z -i1ze -ize8n -i1zi -i8zir -i6z5w8 -iä8m -i1ä6r -i5ät. -i5äv -i1ö8 -iü8 -i1ße -i6ß5e2r1s -ja5la -je2t3r6 -6jm -5jo -jo5as -jo1ra -jou6l -j2uc -juc8h -ju5cha -ju1ge -jugen4 -juge2n1d5 -jun2g5s6 -3jä -1ka -kac8h -8ka2chs -8ka2k1z -ka1la -ka6l5d -ka2m5t -ka1na -2k1an5l6 -8kapf -k8a6pl8 -ka5r6a -kar1b -6k3ar1be -ka1ro -kar1p -kar6p5f -ka2r1t -4k3arti -8karz -ka1rä -ka1si -kasi5e -ka1te -ka6te1b -ka2t1t -kat8ta -kau2f6s -kau3t2 -2k1b -2k1c -4k1d -ke6hr -keh2r6s -kehrs5a -8keic -2k1eig -6k5ein -6k5eis -ke1la -ke6lar -ke1le -ke8leis -k4e8lo -8ke2mp -ken1te -k5ente. -k3en2t1f2 -8k5en2t1s -6k1en2t1z -ke1ra -ker1l -k5erlau -2k1f8 -2k1g -2k1h -k2if -ki5fl -8ki1k -kin2g6s5 -6kin1h -k2io -ki5os -ki5sp2 -ki5th -8ki8ö8 -2k1k2 -kl8 -1kla -8klac -kla1ge -k5lag8er -kle1b -kle4br -k3leib -3klei1d -kle5isc -4k3leit -k3lek -k1ler -6k5l6er. -5klet -k1li -2klic -8k3lig -k2lim -k2lin -5kl2ip -5klop -k3lor -1klä -2k1m -k1ma -k3man -kma1ni -kman6i5e -kn8 -k1ne -6kner -k2ni -knä8 -1k2o -ko1a2 -ko1d -ko1de -ko6de. -ko1i -koi8t -ko1mi -ko6min -ko1op -ko1or -ko1ph -ko6pht2 -ko3ra -ko8rd -kor1de -kor6d5er -ko5ru -ko2ts -ko5t6sc -k3ou -3kow -6k5ox -2k1p2 -k1q -1kr8 -4k3ra1d -2k1rec -4k3reic -kre5i1e -2krib -6k1r2ig -2kr2ip -6kro1ba -2ks -k1sa -k6sab -k1sal -ksa2l8s -k1sam -k8sa2m1t -k6san -k1sc -k1se -k2s1ex -ksp2 -k5spat -k5s1pe -k1spi -k8sp2il -ks1p2o -ks6por -k1spr6 -kst8 -k1su -k2s1uf -2k1t -kta8l -kt5a6re -k8tein -kte8re -k2t1h -k8tin1f -ktr6 -k2t3rec -k2t1s -1ku -k2uc -ku1c8h -kuc2k8 -k3u6hr -k6ui -ku5ie -ku2m2s1 -kun1f -kun2ft -kunf2ts5 -ku6n2s -kunst3 -ku1ra -ku8rau -ku4ro -kurz1 -4kus1ti -ku1ta -ku8ß -6k1v -2k1w -ky5n -2k1z -1kä -kä4m -4k3ä1mi -kä1se5 -1kö -kö1c -kö1s -1kü -kü1c -kü2r1s -kür6sc -1la. -8l1a2b1f8 -8la2b1h -la1b2r -2l1a8b1s -lac8h -lac6h3r -la1d -la8dr4 -5la1du -8l1a2d1v -6la2ff -la2f5t -la2gn8 -la1ke -5laken -8la2m1b -la1me -la6mer -la2mp -lam1pe -5lampe. -2l1a2m1t -la1na -1la2n1d -lan4d3a -lan4d3r4 -lan4gr4 -lan1m -8lan1me -6lann -8la8n1w -6lan1ä -8l1ap1pa -lap8p1l8 -lap6pr6 -l8ar. -la5ra -la4r4af -la8rag -la8ran -la6r5a6s -lar1b -l3ar1be -l2a1re -la8rei -la6r1m -6larm. 
-la8sa -la1sc -la8sta -l2a1t8i -6l5a2t1m8 -4laus3s4 -4l1au1to -1la1w -2lb -l1ba -l8bab -l8bauf -l1be -lbe1d -l8be1de -l1bi -l4b3i6ns -lbl8 -l5blo -l8bs -lb1sta -lbst5an -lbs1t3e -8lc -lc8h -l1che -l8che2r1t -l1chi -l6ch3m -l5cho -l8ch5w -6ld -l1de -l4d3ei -ldr4 -ld1re -l1dü -l6düb -le1b -le2bl8 -le1br -le8bre -lec8h -le2cht -lech2t6s5 -le1d -led2r4 -le1f -6le2ff -le1ga -le4gas -1le6hr -lei6br -le8in1f -8leinn -5lei1stu -le2k1t -4lektr6 -le1le -le1ler -le6l5e2r1s -l4e1mo2 -8le2mp -l8en. -le2n1d -8len2ds -len1du -6lendun -le1ne -l6enen -le8ne2n1d -len8er1w -6l5en2t1s -4l3en2t1w -4l1en2t1z -8len1zy -8leo1z -6le1pi -le6p2ip -8le1p2o -1ler -l6er. -ler1b -8ler8bs -le8rd -6l5er1de -le1re -l2erei -le8reis -le8re2n1d -le4r3er -4l3e8r1g -l8er1gr4 -le6rk -6ler1kl8 -ler1zi -6l5erzie -8lerö8 -le1se -8lesel -le1si -lesi5e -les1k2 -le3s1k2o -le3th6a -le2t1s -5le2uc -4leu1ro -leu4s3t -le5xe -6l1e8x3p2 -l1f -2l1g -l1ge -lge2n1d8 -l8gh -lg1li -lg2lie3 -l3glie1d6 -6l1h -1li -li1ar -li1as -2lic2k -li1d -li8dr4 -li1en -li5en6n -li8e2r1s -li8e2r1t -2ließ -3lig -li1ga -li8ga8b -li1g6n8 -l2il -li1l8a -8li2m1b2 -li1na -li2n1d -4l3in1du -lin2gs5 -4l3in1h -6l5in1j -lin2k4s3 -4lin2k1t -2lint -8lin1v -l2ip -4lipp -5lip1t -li1sa -4li1sam -l2i1v -livi5e -6l1j -6l1k -l8keim -l8kj -lk2l8 -l1k2o -lko8f -lkor8 -l2ks -lk2sa -lk2se -6ll -l1la -ll3a4be -l8la2b1t -ll8an5l6 -l2l1b -l8l1c -l6l1d6 -l1le -l4l3eim -l6l5ei1se -l1ler -ller3a -l4le1ti -l1li -l5l2ip -l1lo -l4l3o2r1t -ll5o1v -l2ls -ll1sp2 -ll6spr6 -l2l1t -llte8 -l1lu -l4l3u8r1g -l1lä -l5lü -l6lüb -2l1m -l1mo -l6m5o6d -6ln -l1na -l1no -8lo1bl8 -lo6br -loc8h -3lo2ch. -lo1fe -l5o4fen -l8o1g -lo1ge -5loge. -5lohn -4l3o6hr -1lo1k -l2on -lo1pe -4l3o4p8er -lo1ra -2l1o8rd -6lo8r1g -4lo2r1t -lo1ru -1los. -l6o1se -lo8sei -lo1si -3losig -lo1v -lo6ve -lo1wi5 -6l1p -lp2f -l1ph -l8pho -l8p1n -l2pst -lp4s3te -l2p1t -l1q -8l1r -2ls -l1sa -l6s1a6r1m -l1sc -l1se -l8sec -l6s5e8r1g -l4s3e2r1s -l8s1h -ls1l -l5s6la -l1sp2 -ls1p2o -ls4por -ls2pu -l1str6 -l1su -l8su1ni -l1sü -2l1t -l6t5a2mp -l4t3ein -l5ten -l6t5eng -l6t5er1p -l4t3hei -lt3her -l2t1ho -l6t5i6b -lt2i1l -ltr6 -l8trö -l2ts -lt1sc -lt1se -lt6s1er -lt4s3o -l1t5u2ms -lu8br -lu1d -lu2dr4 -lu1en8 -8lu8fe -lu2ft -luft3a -luf8tr6 -lu6g5r4 -2luh -l1u6hr -l6ui -lu5it -5l6uk -2l1u2m1f6 -2l1u2m1w -1lun -lu1ni -6l5u6n2io -4l3un1te -lu5ol -4lu8r1g -6lu2r1s -l3u2r1t -lu4sto -lus1tr6 -lu6st5re -lu8su -lu6tal -lu1te -lu6t5e6g -lu8te8r1g -lu3the -lu1to -lu6t5or -lu2t1r6 -lu6ß5 -l1v -l1ve -lver1 -lv2e5r6u8 -2l1w -1ly -lya6 -6ly2m1p2 -ly1no -l1ze -l8zess -l1zo -l8zo8f -lzw8 -l3zwei -lz5wu -3lä2n1d -lä5on -lä6sc -lä2t1s -5läuf -2l1äug -läu6s5c -lä5v -l1öl -1lös -lö1ß6t -6l1ü1b6e2 -1ma -8m1a2b1g2 -mac8h -ma5chan -ma1d2 -ma5el -4ma2g1g -ma1g8n8 -ma1la -ma8lau -ma6l5d -8mal1de -ma1li -mali5e -m8a1lu8 -ma8lut -2m1a2mp -3man -ma2n1d2 -man3ds -8man1gr4 -ma1ni -man2i5o -ma6ns -8m5an1st -6m1ap1pa -mar1b -4m3ar1be -ma6rk -mar8kr8 -ma1r4o -ma2r1s -marsc8h -mar8s6ch2m -3mas -ma1sc -ma1tö -4m5auf -ma5yo -2m1b -m1b6r -2m1c -2m1d -m2ds -md6sä -1me -me1c8h -me5isc -5me6ld -me2ls -mel8sa -8me2mp -me5nal -me2n1d -men4dr4 -me6ns -men1sc -mensc8h -men8schl6 -men8s8chw -men2t1s -8mentsp2 -me1ra -me8r1g -mer4gl -me1ro -3mes -me1se -me6s5ei -me1th -me8ß -2m1f6 -2m1g -2m1h -1mi -mi1a -mi6a1le -m2il -mi1la -2m1i2m1m -mi1na -mi5nü -mi1sa -mi4s3an -mit1h -mitr6 -mi5t6ra -3mi2t1t -mitta8 -mi6ß5 -6mj -2m1k8 -2m1l -2m1m -m1ma -m6ma1d2 -m6m5ak -m1me -m8menth -m8men2t1w -mme6ra -m2m1n8 -m2ms -mm5sp2 -mm5u2ms -mmu2t5s -m8män -m1n8 -m5ni -1mo -mo5ar -mo1d -mo4dr4 -8mof -m8o1g -mo1ga 
-mo8gal -mo1k -mok2l8 -mo4kla -mo6l5d -m2on -mo2n1d -mon8do -mo1no -mo4n3o1d -m1o6ns -mon2s1tr6 -mon1t8a -6m5ony -mo1pa6 -mo1ra -mo8rd -mor8d5a -mo1sc -mo1sp2 -5mot -moy5 -2mp -m1pa -mpfa6 -mpf3l -m1ph -mphe6 -m1pi -mpin6 -m1pl8 -mp2li -m2plu -m1p2o -mpo8s1te -m1pr6 -mprä5 -mp1t -mp8th -m1pu -mput6 -mpu5ts -m1pö -8m1q -2m1r -2ms -ms5au -m1sc -msc8h -msch4l6 -msp2 -ms6p2o -mspr6 -m3spri -m1str6 -2m1t -mt1ar -m8tein -m2t1h -m2ts -mt6se -mt8sä -mu5e -6m5uh -mu1mi1 -1mun -mu2n1d -mun6dr4 -mu1se -muse5e -mu1ta -2m1v -mvol2 -mvo6ll3 -2m1w -1my -2m1z -mä1k -mä6kl8 -1män -mä1s -mä5tr6 -mäu4s3c -3mäß -mö1b2 -6möl -1mü -5mün -3müt -1na. -n5ab. -8n1a2b1n -n1a8b1s -n1a8b1z -na6bä -na2c -nac8h -na1ch3e -3na2cht -1nae -na5el -n1afr6 -1nag -1n2ah -na8ha -na8ho -1nai -6nair -na1k2o -na4kol -n1a2k1t -nal1a -8na1ly -1na1ma -na1me -na4mer -na1m2n8 -n1a2mp -8n1a2m1t -5nan1c -nan6ce -n1a2n1d -n6and. -2n1ang -1na1ni -1nann -n1a6ns -8na8n1w -5napf. -1n2ar. -na2ra -2n1ar1c -n8a8rd -1n2a1ri -n8a6rk -6n1a6r1m -5n6a2r1s -2n1a2r1t -n8ar1v -6na2t1m8 -na2ts -nat6s5e -1nau1e -4nauf -n3aug -5na6ui -n5a6uk -n6a5um -6n1au2s1b2 -6n1au1to -1na1v -2na1x -3na1z -1n8aß -n1b2 -n1ba -nbau5s -n1c -nc8h -nche5e -n6ch5m -2n1d -nda8d -n2d1ak -n6d5a6ns -n1de -n2d1ei -nde1la -nde8lac -nde2ls -ndel6sa -nder1h8 -n8derhi -nde4se -nde1sta -nde8stal -n2d1j -n2d1n6 -nd1ni6 -ndn6is5 -n1do -n6d5o2r6t -ndr4 -nd3rec -nd3rot -n2ds -nd1sa -nd1sam -nd8sa2m1t -nd6sau -n2dt -ndt1h -n1du -n8du2m1d -1ne -ne5as -ne1b -ne2bl8 -6n5e2b1n -2nec -5ne1ei -ne5en -ne1g4l -2n1egy -4n1ein -8neis -ne1le -4n3e4lem -8ne2m1b -2n1e2mp -nen1a -1ne1ne -6n5e4ne8r1g -nen3k -8nen2t1b2 -4n3en3th -8nen2t1l -8n5en2t1n -8n5en2t1s -ne1ra -ne5r8al -ne8ras -ner1b -8ner1bi -ne8rd -ner1de -6n5erde. -ne1re -n2erei -nere5i6d -ner1f -ner1fo -nerfor6 -ner1h8 -6n5erhö -ner1l -8nerlö -2n1e2rr -ne2r1s -n8ers. -ne2r1t -nertr6 -6n5ertra -2n1erz -ne1si -nesi3e -net1h -neu4ra -neu5sc -8neuß -n1f -n2f5f -nf2l -nfl8e -nflei8 -nf1li -nf5lin -n2ft -nf2ts -nft8st -n1ga -n8g5ac -n6g5d -n1ge -ng8en -nge1ra -nge8ram -n2g1g2 -ng1h -ng1li -n6glic -n1gr4 -ng3r2ip -ng8ru -n2gs -ng2se4 -ng2si -n1gu -n2g1um -n1gy -n1gä -n8gäl -n1h -nhe6r5e -1ni -ni1bl8 -nic8h -ni5chä -ni1d -ni1de -ni8de1e -n6ie -ni1en -nie6s5te -niet5h -ni8e2t1n -n2i1ge -4n3i6gel -n6i1k -n2il -ni1la -2n1i2mp -ni5na -2n1i2n1d -8nin1f -6n5in1h -1ni1ni -ni8nit -6n5inn -2n1i6ns -4n1int -n6is -nis1tr6 -ni1th -ni1tr6 -n1j -n6ji -n1ka -n8ka1d -nk5a6ns -n1ke -nker1l -n8kerla -n1ki -n6k5in1h -nkl8 -n5klö -n1k2n8 -n8k5not -n1kr8 -nk3rot -n8krü -n2ks -nksp2 -nk5s1p2o -n2k1t -nk6t5r6 -n1ku -n8kuh -n1kü -n6küb -n5l6 -n1li -nli4mi -n1m -n1me -nme6n4s -n1na -n1ne -n8ne8r1g -n1ni -nn2i5o -n1no -nn1ta -nn4t3ak -nnt1h -n1nu -nnu1e -n1ny -n1nä -n1nö -n1nü -no5a -no1bl8 -no4b3la -4n3o8bs -2no2b1t -noc8h -no1che8 -no1d -no1di -no6die -no4dis -no8ia -no5isc -no1le -6n5o6le3u -no1ma -no4mal -n8o1ni -non6ie -noni6er -2n1on1k -n1ony -no1pe -4n3o4p8er -6nopf -nop1t -6n1opti -no3ra -n1o4ram -no8rd -nor6da -4n1o8r1g -2n1o2r1t -n6os -no1st -8no2st. -no1ta -no8tan -no1te -no8ter -no1ty1 -noty6pe -6n5ox -n1p2 -n1q -n1r -nrös3 -6ns -n1sac -ns3ang -n1sc -n1se -n8sel1f -n8s5er1f -n8se8r1g -n6se6rk -ns5er1w -n1si -n8sint -nsp2 -n1s2pe -n1spr6 -n1sta -n6s5tat. 
-n1sto -n6stob -n1str6 -n1ta -n1tag -n4t3a4go -nt5an1h -n2t3a6rk -n2t3a2r1t -n1te -n4t3eis -nte5n6ar -nte1ne -nte8nei -nter3a -nte1re -nt2e6rei -nt1h6a -nt6har -n3ther -nt5hie -n3thus -n1ti -nti1c -n8tin1h -nti1t -n2t1l -ntlo6b -n2t1m8 -nt1me -ntmen8 -n1to -n2t3o4ti -n1tr6 -ntra5f -ntra5ut -nt8rea -n2t3rec -nt8rep -n4t3rin -nt8rop -n4t3rot -n4trü -n2t1s -nts6an -nt2s1k2 -n1tu -n2t1z -n1tä -n1tö -n8töl -n1tü -1nu -nu1a -nu5el -nu5en -4n1u6hr -n6ui -nu5ie -8nu2m1l -6n5u2ms -6n5u2m1w -2n1u2n1d -6nu1ni -6n5un1r -2n1unt -2nup -2nu6r -n5u1ri -nus1k2 -nu3s1kr8 -nu5ta -n1v -8n1w -1nys -n1za -n6z5a6b -n2z1ar -n6zaus -n1zi -nzi4ga -n1zo -n8zof -n1zu -n6z5unt -n1zw8 -n1zwi -n6zwir -1näc -5nä1e -5n6äi -n8äl -nä6m -nä6re -n5ärz -5näus -n1öl -1nöt -n5ö2z -5nü. -6n1ü2b -5nüß -o5ab. -oa2l -o8ala -o1a2m -o1an -o1ba -ob1ac -o1be -obe4ra -o6ber1h8 -5o4be2r1s -o4b2e1ru -obe1se -obe6ser -1ob1j -o1bl8 -o2b1li -o8bs -ob5s1k2 -3ob2st. -ob8sta -obstr6 -obst5re -ob5s1z -oc8h -o1che -oche8b -o8chec -o3chi -och1l6 -o6ch3m -ocho8f -oc6hr -o3chro -o2cht -och3to -o3chu -o8ch1w -o1d -o2d1ag -o2d1d -od2dr4 -o1de -ode5i -ode6n5e -o2dt -od1tr6 -o5e6b -oe1d -oe1de -o5e6der. -oe8du -o1e1f -o1e2l -o1e2p -o1er. -o5e8x -o1fa -o2ff -of1fa -of8fan -1of1fi -of8fin -of6f5la -o5fla -o1fr6 -8o1g -og2n8 -o1ha -o1he -o6h5eis -o1hi -ohl1a -oh1le -oh4l3er -5ohm. -oh2ni -o1ho -o6hr -oh1re -oh1ru -o1hu -oh1w -o1hy -o1hä -o5ia -oi1d -o1id. -o8i1di -oi8dr4 -o5i2ds -oisc8h -o5i8s2ch. -oi1se -oiset6 -o1i2s1m -o3i2st. -o5i6tu -o1j -o1k -ok2l8 -o1kla -ok3lau -o8klä -o2k1t -1okta -o1la -o6ld -old5am -old5r4 -o1le -ole5in -o1le1r -ole3u -o2l1g -ol6gl -o6l1k -ol2k2l8 -ol2k4s1 -o6ll -ol1la -ol8lak -ol8lauf. -ol1le -ol6lel -ol8less -o1lo -o2l1s -ols1te -ol2ster -ol6s1k2 -o1lu -o1ly -oly1e2 -5olym -o1ma -o2mab -o3m6an -o8mau -o2m1b -om1be4 -o1me -o8merz -o2ms -om5sp2 -o1mu -o1mun -o8munt -o1mä -o1mö -o1na -ona8m -o2n1a1x -o1ne -on8ent -o6n5er1b -8o1ni -on6ie -oni5er. -on1k -on1na -on6n5a6b -o1no -ono1c -ono1k -o4no2k1t -1o6ns -on2t1s8 -o1nä -oo8f -1o8o1g -oo2pe -oo2sa -o1pa -o1pe -op8er -3o4pe1ra -opf2l -o3pf1li -opf3lo -opf3r6 -o1pi -o1pl8 -o2p1li -o5p6n -op8pa -op6p1l8 -o1pr6 -op1t -o3p4ter -1opti -o1pä -o5pö -o1q -o1ra. -o3ra1d -o8ra2d1d -1oram -o6rang -o5ras -o8rauf -orc8h -or5cha -o8rd -or1da -or4d3a4m -or1de -or8dei -or8deu -1or2d1n6 -or1do -or4dos -o1re -o5re. -o2re2h -o8r5ein -ore5isc -or6enn -or1f -or8fla -or8f1li -o8r1g -1or1ga -or1ge -5orgel. -or2gl -o1ri -or2ie -ori1en -5o6rient -o2rn -or1na -or8nan -or8nä -o1ro -o2rr -or1r2h8 -o2r1t -or6t5an -or8tau -or8te1re -o1r6us -o1ry -o1rä -or1ü2 -o1sa -o1sa3i -6o1se -o8se6rk -o1s1k2 -o6ske -o6ski -os2kl8 -os2k2o -os2kr8 -os1n -os1ni -osn6i5e -o1so -o2s1o2d -osp2 -os1pe -o3s4p8er -o1sta -o4stam -o6stau -ostr6 -o3stra -ost3re -o1su6 -o6s5ur -os1z -o5s6ze -o1ta -ot3auf -o6taus -o1te -o6ter1w -o1th -othe5u -o2t6h1r6 -o1ti -o1to -oto1a -otr6 -ot1re -o1tri -o1tro -o2ts -ot1sc -o3t1su -o2t1t -ot6t5e8r1g -ot2t3h -ot2t5r6 -ot8tö -o1tu -ou3e -ouf1 -ou5f6l -o5u6gr4 -o6ui -ou5ie -ou1ra -ou6rar -ou1t6a -o1v -o1wa -o1we -o6wer. -o1wi -o1wi1d6 -o1wo -o5wu -o1xe -oy5al. 
-oy1e -oy1i -o5yo -o1z -oza2r -o1ze -1o2zea -o1zo -ozo3is -oö8 -o1ße -oß5e2l1t -oß1t -3paa -pa6ce -5pa1d -pag2 -1pak -pa1la -pa8na8t -pa1ni -pan6ie -pani5el -pa1no -pa4nor -pa6n1s2 -1pap -pap8s -p2a1re -pa8rei -pa6rk -par8kr8 -par2o8n -par5o6ti -pa2r1t -part8e -5p8ar5tei -3par2t1n -pa2s1se -pas6sep -pa1th -pa4th6a -1pau -6paug -pau3sc -p1b -8p5c -4p1d -1pe -4peic -pe5isc -2pek -pen3k -pen8to8 -p8er -pe1ra -pe1re6 -per5ea -per5e1b -pe4rem -2pe2rr -per1ra -per8ran -3pe2r1s -4per1si -pe3rü -pe4sta -pe2t2s -p1fe -p2f1ec -p4fei -p2f1f -pf2l -5pf8lanz -pfl8e -pf8leg -pf3lei -2p2ft -pf3ta -p1g -1ph -2ph. -2p1haf -6ph1b2 -8ph1d -6p5heit -ph5e1me -6ph1g -phi6e -8ph1k2 -6phn -p5ho6ll -pht2 -ph3th6a -4ph3t1he -phu6 -6ph1z -pi1en -pi5e2rr -p2il -pi1la -pi1na -pi6ns -5pin1se -p2io -pi8o1ni -pion6i8e -1pis -pi1s2k2 -pi1th -p1k -pl8 -5pla -p2lau -4plei -p3lein -2p1ler -6p5les -p1li -2p3lig -p6li1k -6p5ling -p2li1z -plo1mi -plo8min -6p1m -p1n -1p2o -8poh -5pol -po1la -po8lan -po1ly1 -po3ny -po1ra -2po2rn -po2r1t -por4t3h -po5rö -5po1ti -p1pa -p1pe -p6p5ei -ppe6la -pp5f -p2p1h -p1pi -pp1l8 -ppp6 -ppr6 -pp5ren -pp1s -p2pst -pp2s1te -p5pö -pr6 -3preis -1pres -2p3r2ig -5prinz -1prob -1pro1d -5pr8o1g -pro8p1t -pro6t5a -pro1te -prote5i -8proß -prä3l -1präs -prä1te4 -1prüf -psc8h -p5schl6 -2pst -1p2sy -p1t -p8to8d -p2t1s -5p6ty1 -1pu -pu1b2 -2p2uc -pu1d -pu2dr4 -pu2ff -puf8fr6 -6p5uh -pu6n8s -pu1re -pu8rei -pu5s6h -pu1ta -p1v -p3w -5py -py5l -p1z -pä1de -pä6der -p5ä6m -pä8nu -8pär -pät5h -pä2t1s -qu6 -1q6ui -8ra2b1k2 -ra1bl8 -ra6b1la -3rable -ra2br -r1a2b1t -6ra8b1z -ra1d -ra4dan -ra2dr4 -r8afa -5ra3fal -ra1fe -ra4f3er -ra5gla -ra2g3n8 -6ra1ha -ral5am -5ra6ld -4ra2l1g -ra1li -ra8li6ns -2ra6ll -ra2l5t -ra1me -8ramei -r3anal -r6a2n1d -ran1de -ran8der -ran4dr4 -8ran1f -6ran1ga -5ran1gi -ran8g1li -r3an1gr4 -ra6ns -ransp2 -rans5pa -8ra8n1w -r8anz. -ra5or -6rapf -r8a5pl8 -rap1se -rap6s5er -2r1ar1b -1rar1h8 -r1a6r1m -ra5ro -2r1a2r1t -6r1arz -ra1te -ra8tei -ra1th -ra6t5he -6ra2t1l -r4a1tr6 -ra4t3ro -ra2t1t -r5atta -rau1e -raue4n -6raus. -r5au4sta -rau1te -rau8tel -rau2t5s -ray1 -r1b -rbl8 -rb5lass -r6b1ler -rb1li -rb4lie -r1bo -rbon6n -r1br -rbrec8h -r8bre2cht -r8bs -rb6s5tä -r1ce -r8ces -rc8h -r1che -rch1l6 -r6ch3m -rc6hr -rch3re -r2cht -rch3tr6 -r8ch1w -8rd -r1da -r1dac -rdac8h -r8da2chs -r8d1ap -rda5ro -r1de -rde5i6ns -r1di -rd2io5 -r8dir -r1do -rd3ost -r1dr4 -r8drau -1re. -re1ak -3re1a2k1t -re3a2ls -re1am -re6am. -re1as -re1b -re1be -4reben -re6bl8 -rec8h -rech5a -re1d -r8e1di -re3er -re1f -8re2ff -3refl -2reh -5re1ha -r4ei. -reic8h -rei2ch6s5 -rei1e -8reier -6r1eign8 -re5i2mp -4r3ein1a4 -6r3ein1b2 -6rein1g -6r5einn -6rein1r -4r3ei6ns -r3eint -re1li -reli3e -8r5e2l1t -re2mp -6r1emp1f -2re2m1t -ren5a6b -ren8gl -r3en1ni -1re1no -5ren1te -4r3enth -8ren2t1l -4r3en2t1w -8r1en2t1z -ren4zw8 -re1on -re1q -requ6 -re1q6ui5 -1rer -rer1b -rer4bl8 -6rer8bs -4r3e8rd -rer1h8 -8rerhö -re6rk -8rer1kl8 -rer1l -4r3erla -8rerlö -re2rn -4r3er6ns -6r5er1nä -rer5o -re2rr -rer1re -6r5erreg -re2r1t -r5ertr6 -rer1w -r5erwec -r5erö8 -re2sa -re1sc -resc8h -re8s6ch2m -2ress -re5u8ni -re1w -6rewo -2r1ex -r1f -r1fe -r8fe8rd -rf1li -rf4lie -8r1g -r1ga -r8gah -r1ge -rge1b -rge4bl8 -rge5na -rge1s -rgest4 -rgn8 -rg6ne -r2g1ni2 -r1go -r8gob -r1gr4 -r4g3ret -r2gs -rg1se -rg8sel -r1h8 -r2hy -5rhy1t -ri1ar -ric8h -ri5cha -ri1d -ri2d2g -r2ie -rie2g4s5 -ri8ei -ri1el -ri6e1le -ri1en -ri3er. -rie2r1s -ri5ers. 
-r2if -ri1fa -ri6fan -ri1fe -ri8fer -ri8fr6 -1r2ig -ri1k -ri8kn8 -r2il -ri5la -ri1mä8 -ri1na -ri2n1d -r8in1de -rin4ga -rin6gr4 -1rinn -rin1ne -6rinner -ri1no1 -ri6ns -r8in1sp2 -4rinst -ri1nä2 -r2io -rio1c -ri5o6c8h -ri1o2d -ri3o6st -2r1ir -r2is -ris1k2 -ri3s1k2o -risp2 -ri8spr6 -ri5s1v -r2it -ri1ta -6r5i6tal -ri5tr6 -r2i1v -ri1ve -ri6ve. -8r1j -6rk -r1ke -rke6hr -rkeh2r6s5 -r1ki -rkl8 -rk1li -r3k2lin -r1k2n8 -r2ks -rkst8 -rk3str6 -r2k1t -rk4t3an -rk6to -r1ku -r6kuh -r1kä -rkä4s3t -r1l -r5li -rli1ne -rline5a -6r1m -r1ma -r3man -r6m1an5l6 -rma4p -r4m3a1ph -r1mi -r8min1f -r1mo -r8mob -r2ms -rm5sa -2rn -r1na -rna8be -r5ne -rn2ei -r6ne2if -r6nex -r6n1h -rn1k -r1no -r6n5oc -r6ns -rn1sp2 -r1nä -r1nü -ro1be -ro6be2rn -6ro8bs -ro1c8h -roc2k -3ro2ck. -ro1d -ro5de -ro1e -4ro1fe -ro1he -ro8he2r1t -1ro6hr -ro5i1d -ro1in -ro5isc -ro1ly -6r5olym -r2on -6r1o8o1g -ro1ph -ro6phan -r3o2r1t -ro1s2p2 -ro5s6w -ro1ta -ro4tau -ro1tr6 -ro6ts -5rout -r1p -r1pe -rp8er -rpe8re6 -rp2f -r2ps -r2p1t -r1q -2rr -r1ra -r1re -r1rer6 -rr1h8 -rr6hos -r5rhö -r1ri -r1ro -rro8f -rr8or -rror5a -r1ru -r3ry -r1rä -r1rö -r1rü -2r1s -r2s1te -r2s1ti -r6sab -r4san1f -r1se -rse6e -rse5na -r2s1h -rs1k2 -r6s1ka -r6ski -rs2kl8 -r8s1k2o -r2s1l -rs2p2 -r1sta -r6stauf -r8ster1w -rstr6 -r8stran -r2s1w -rs1wi3d4 -r2s1z -2r1t -2r2t3a2r1t -r8taut -r5tei -rt5e2i1ge -r8te1pe -r4t3er1h8 -rter1l -r8terla -r4t3hei -r5t6hu -r4t3int -rtr6 -r4t5re2if -r2ts -rt1sc -rt1se -rt6s1er -rt6s5o -rt6s5u -r6t5u2n1d -2r8tu2r1t -ru1be6 -ru1en -1r4uf -ru2fs -ruf4st -r6ui -ru1ie -2r1u2m1g -2r1u2m1l -2ru2ms -ru2n1d -run1de -run8der -run4d5r4 -6run2d1z -6run1f -8ru6ns -2r1unt -2r1ur -r6us -ru6sta -rus1tr6 -ru6tr6 -1ru2ts -r1v -r1ve -rven1 -rvi2c -r1w -r1x -r1za -rz5ac -r6z5al -r8z1ar -r1ze -r8ze8rd -r6z5er1f -rz8er1h8 -r2z1t -rz4t3h -r1zu -r8zum -rä4s1te -räu8sc -r1öf -5rö6hr -rö5le -3rö6ll -rö1mi -5römis -r1ör -rö2sc -3rü2mp -1sa. -1saa -sa1be -s3a4ben -sa2bl8 -2s1a8b1s -6s1a2b1t -6s1a2b1w -sac2k -3sa2ck. -sa1d -sa1de -6s3a4der -1saf -s8a1fa -4s1a2ff -sa5fr6 -1sag -1sai -sa1i2k1 -4s1a2k1t -1sal -sa1la -sa6l1p -4s3alpi -sa2l1t -6salter -salz3a -1sam -s5an1b2 -san2c -1sa2n1d -san1ge -s5angeh -6s1an5l6 -2s1a6ns -6s3an1tr6 -8s1a8n1w -s1ap -s6a1ph -8sa1p2o -sap5p6 -s8ar. -2s1ar1b -3sa8r1g -s1a6r1m -sa5ro -2s1a2r1t -6s1arz -1sas -1sat -s8a1t8a -2s1a2t1l -sa1to -sa8tom -3s8au1e -s5au2ff -sa6u5i -s6aur -2s1aus -5s6au1se -2s1b2 -2s1ca -s4ce -sc8h -8s2ch. -3scha. -scha1d -5scha1de -3schaf -3schal -sch5a1me -8schan1c -8s6ch1b2 -1sche -6s1che1f -8schex -2s6ch1f2 -2sch1g -2s4ch1h -1schi -2sch1k2 -schl6 -5schlag -5schlu -s6ch2m -6sch3mäß -s2chn6 -6sch1n8aß -1scho -6scho8rd -6s6ch1p -sc6hr -3schri -8sch1ric -8sch1r2ig -8schrou -6s2chs -2s2cht -sch3ta -sch3tr6 -1schu -8s6ch3unt -6sch1v -2sch1z -5schö -5schü -2s1c8o -s1c6r -scre6 -6s1cu -2s1d -1se -se5an -se1ap -se1b -se1be -se6ben -se5ec -se1ei -see5i6g -se3er1l -se1f -8se2ff -se1ha -se6han -se8hi -se8hö -sei1d -6s5eid. -2s1eig -s8e2il -5sein. -sei5n6e -6s5ein1h -3s8eit -3sel. -se1la -se4lar -se2lb4 -se1le -6s3e4lem -se1ler -se8ler1l -2s1e2mp -sen3a2c -se1ne -se5nec -6s5en2t1s -4s1en2t1z -s8er. 
-se1re -s2erei -se8reim -s2e1ri -se1r5inn -se6r1m -8sermä -8s5er1zi -serö8 -6ser1öf -se1um -8sexa -6s1e8x3p2 -2s1f2 -sfa6ll -sfal1le -sfal8ler -2s3g2 -s1ge -sge5b2 -s1h -s8he1w -5s6h2ip -5s4hop -1si -2siat -si1b -sic8h -si2cht -sich2t6s -si1d -si1de -6s5i6de1e -sie1ge -siege6s5 -si1en -si5e2rr -s2i1f2 -si1g2n8 -si6g5r4 -si1k -si1ka -si8kau -sik1i -si4kin -si2kl8 -si8kü -s2il -si1la -si2lb -sil6br -si1na -2s1in1f -sin5g1h -2s1in1h -sin1ne -sinne6s5 -2s1i6ns -si5ru -si5str6 -4s1j -s1k2 -6sk. -s1ka -2skau -ske8l6c -skelc8h5 -s6ke1le -1s2ki. -3s4kin. -s6ki1z -s8kj -6skn8 -s1k2o -2s3kow -s1kr8 -3s2krib -3s2kr2ip -2s1ku -8s1kü -s1l -s8lal -slei3t -s4low -2s1m -s1n -6sna -6snot -1so -so1c8h -so1d -2s1o1do -so4dor -so1fe -6s5o4fen -so1lo3 -s2on -so5o8f -4so1pe -so1ra -2s1o8rd -so8r1g -4s1or1ga -so2u5c -so3un -4s3ox -sp2 -8s3paa -5spal -1span -2s1pap -s1pe -s2pec -s4peis -1s2pek -sp8er -s6pe8r1g -4s3pe2r1s -s6pes -2s1pf -s1ph -8sphi -1s2phä -1spi -spi4e -6s5pig -spi6ns -6s5pin1se -2s1pis -spl8 -2s5pla -s1p2o -2s5pol -5s6pom -6s5pos -6s5po1ti -spr6 -1spra -3s8prec -6s3preis -5spring -6s1prob -1spru -s1pu -s2pul -1s2pur -6s5py -5spän -1spü -s1q -2s1r -2ssa -2s1se -2s1si -2s1so -2s1sä -2ssö -2ssü -2s1sc8h -sse8nu -ssi1ni -ssin6i6s -ssoi6r -2st. -1sta -4s1ta1fe4 -2s1tag -sta3la -6s1ta1le -4s2ta2l1g -8sta6l1k -8s2t1a2m1t -6st5an1f -4st3a6ns -6st5a8n1w -6star1b -sta4te -6staus -2s2t1b2 -6s2t1c -6s2t1d -s1te -4s1te2il -3s2tep -6s5tep1pi -8ste2s1se -6s2t1f2 -2s2t1g2 -2sth -st1h6a -st3hei -s8t1hi -st1ho -st5hu -s1ti -s2ti4el -4s2ti2g1m -6s2t1i2n1d -4s2tin1f -s2ti8r -2s2t1k2 -2s2t1l -2s2t1m8 -1sto -sto6ll -6s5toll. -4st3o1pe -6s5topf. -6sto8rd -6s2t1p2 -str6 -4s1trai -s3tral -6s5tr6aum -3str8aß -3s2trec -6s3tre1f -8s3treib -5s4tre2if -6st1re1no -6stres -6st3re1v -2s2t5r2ig -8s2t1r2is -s8tro1ma -st5r6o1se -4st1r4uf -3strum -6s1träg -2s2t1s6 -2s2t1t -1stu -stu5a -4s1t2uc -2s1tu1e -8stun. -2s2t1v -2s2t1w -s1ty1 -s2tyl -6s2tz -1stä -8s5täg -1stö -1stü -8s5tüc8h -4s1tür. -1su -su2b1 -3s2uc -su1e -su2fe -su1ma -su8mar -su2m1f6 -6sumfa -8su2m1k8 -2s1unt -sup1p2 -su1ra -6s5u6ran -su2r1t -6surte -2s1v -2s1w -1sy -8syl. -sy5la -syn1 -sy2na -sy1ne4 -s1z -s1ze -s4ze2n1d -sze1ne -5s6z4ene. -8s1zu -1sä -6s5ä2n1d -s1äug -6säu1gi -6s1äuß -5söm -2s1ü2b -1süc -sü8di -1sün -5süß -taa2t2s3 -4tab. -t6a1ba -taba6k -ta8ban -ta1b2l8 -ta1br -ta6b1re -4ta8b1s -t3ab1sc -8ta8b1z -tac8h -6t3a2cht -ta1d -ta1de -ta6der -6tadr4 -ta2d6s -ta2d2t1 -1ta1fe4 -1tag -ta6ga6 -ta1ge -ta8gei -tage4s -ta2gs -tag6s5t -tah8 -tahl3 -tai1ne -tai6ne. -ta5ir. -ta2k1t -tak8ta -tal3au -1ta1le -ta8leng -ta1ler -ta6l5e2r1t -ta1me -6t5a6mer -6ta2mp -tam1pe6 -2t1a2m1t -ta2n1d -tan5d6a -tan8dr4 -tan2ds -tand1s5a -ta1ni -tan6i5e -6t1an5l6 -2t1an1r -t3a6ns -8t5an1tr6 -ta1nu6 -t5a8n1w -8tanwa -tan8zw8 -ta1ra -ta8rau -tar1b -6tar1be -1t2a1ri -2ta6rk -2t1a6r1m -ta1ro -2ta2r1t -t3arti -6tarz -ta1sc -ta1si -ta6si1en -tas1te -ta8stem -ta8s1to -t5au2f1b2 -4t1au2f1n -8taus. -5tau1se -8t1au2s1f2 -6t1au2s3g2 -t5au8s1l -2t1b2 -2t1c -tc8h -t6chu -2t1d -te2am -tea4s -te1b -te1be -te8ben -tec8h -5t8e2ch3n6 -te1f -4te2ff -te1gr4 -te4g3re -te1ha -te6hau -2te1he -te4hel -2t1e6hr -tei1d -te5id. -teig5l -6t1eign8 -tei8gr4 -1te2il -4tein1h -t5einhe -4teis -tei1se -t5eisen -8tei1w -te1la -te8lam -te4lar -te1le -4t1elek -8telem -te1ma -te6man -te6n5ag -te1ne -ten8er1w -ten5k -te6ns -ten1s4p2 -ten1tr6 -ten8tro -4t3en2t1w -8t1en2t1z -te1pl8 -te6p1li -5tep1pi -ter5a6b -te3ral -ter5au -ter1b -ter1ba -8terbar -ter1be -t5erbe. -6terben -8ter8bs -4t3er2b1t -te8rd -ter1de -t5erde. 
-te1re -tere1b -ter5e1be -t2erei -ter5ein -te1rer -te8re2r1s -ter1f4 -ter1h8 -8terhö -te6rk -ter1kl8 -6t3er1klä -te2rn -ter1no -ter8nor -te2rr -ter1re -ter6re. -te2r1s -tersc8h4 -t8erscha -te1se -t5e6sel -te2s1ta -te8stau -t3eu1ro -te1xa -tex3e -8t1e8x3p2 -te8x1t -tex6ta -2t1f2 -2t1g2 -2th. -th6a -5tha. -2t1haa -6t1hab -6t5haf -t5hah -8thak -3t2hal. -6tha2ls -6t3ha2n1d -2t1hau -1the. -3t4hea -t1he1b -t5he2il -t3heit -t3hel1f -1t8heo -5t4he1rap -5ther1f -6t5herz -1thes -1thet -5thi. -2t1h2il -t3him -8thir -3this -t5h1j -2th1l -2th1m -th1n -t5hob -t5hof -4t1holz -thop1t -6th1opti -1t6hr6 -4ths -t1hum -1thy -4t1hä -2t1hö -t1hü -ti1a2m -ti1b -tie1f -tie1fe -tie6fer -ti1en -t2i1ge -ti8gerz -tig3l -ti1k -ti8kin -t2il -ti5lat -1ti2l1g -t1i2n1d -tin4k3l8 -tisp2 -ti3spa -ti5str6 -5ti1te -ti5tr6 -t2i1v -ti1ve -ti8vel -ti8vr -2t1j -2t1k2 -2t1l -tl8a -2t1m8 -2t1n -3to1be -8t1ob1j -toc8h -to3cha -5to2cht -8toc2k -to1d -to1de4 -to8del -to8du -to1e -to1fe -6t5o6fen -to1in -toi6r -to6ll -5toll. -to1me -to8me1ne -t2o6ns -2t1ony -to1pe -to4p8er -5topf. -6top1t -to1ra -to1s -to2s1te -to1s1k2 -to6s1ka -tos2l -2to1ti -to1tr6 -t8ou -2t1p2 -6t1q -tr6 -trac8h -tra5cha -tr8afa -tra8far -tra2f5t -1trag -tra6gl -tra6gr4 -t3rahm -1trai -t6ra6ns -tra3sc -tra6st -3trau1e -t4re. -2trec -t3rec8h -t8rec2k -6t1re1d -t8ree -4t1reg -3treib -4tre2if -8t3reis -8tre1p2o -tr6e6t5r6 -t3re1v -4t3re1z -1trib -t6ric2k -tr2ie -tri6er -2t1r2ig -t8rink -tr2io -tri6o5d -tri1z -tri1zi5 -tro1a -3troc -troc2k -trocke6 -tro5i8d -tro1ma -tro3m6an -tro8man. -tr2on -tro3ny -5tropf -6t5ro1sa -t5roß -5trub -5trup -trut5 -1träg -6t1röh -5trüb -tr5ü3bu -t1rüc -t1rüs -2ts -ts1ab -t1sac -tsa8d -ts1ak -t1sal -t6s5a2l1t -ts1an -ts1ar -ts3auf -tsc8h -t3sc6hr -t5schä -t1se -tse6e -tse1e5i -tsei6n6s -ts3ent -ts1er -t8ser1f -t4se6rk -t8s1h -t1si -5t6si1k -t4s3int -t1so -tso2r1t -ts5ort. -tsp2 -ts1p2o -t5s6por -tspr6 -t6sprei -t1st -t2s1te -t1sta -t6s5tanz -t2s1th -ts1ti -t6stit -t1sto -t4s3tor -t1su -1t2sua -t2s1uf -t8sum. -t2s1u8n -t2s1ur -2t1t -tt5e2if -tte6sa -tt1h6a -ttr6 -tt8ret -t2ts -tt1sc -tt1se -tt8s1er -tt5s6z -1t2uc -tuc8h -tuch5a -1tu1e -6tuh -t5u6hr -t6u1i -tu6it -1tu2m1h -6t5u2m1r -1tu2ms -8tu2m1t -6tu2n1d -6tun1f -2t1unt -tu5ra -tu6rau -tu1re -tu6re. -tu4r3er -2t1v -2t1w -1ty1 -ty6a -ty8la -8tym -6ty6o -2tz -tz5al -tz1an -tz1ar -t1ze -t8zec -tzeh6 -tzehn5 -t6z5ei. -t1zo -t6zor -t1zu -t4z3um -t1zä -t6zäu -5täg -6täh -t5ä2l1t -t8än -tä1re8 -8tä8st -6t1äuß -tö2ff -töf5fe -t5öffen -8tö8k -1tön -4tüb -tü1b6e2 -t3ü4ber1 -t6ü5b8er. -5tüc8h -1tür. -u3al. -u5a2lb -u5al1f -u3a6l1h -u5a6l1k -u3a6l1p -u3an. -ua5na -u3a2n1d -u5a6ns -u5ar. -ua6th -u1au -ua1y -u1ba -u2bab -u1bi -ub6ie -ubi5er. -u1br -u6b5r2it -u8bs -ubs2k2 -u5bö -u1bü -u8büb -2uc -uc8h -u1che -u6ch5ec -u1chi -uch1l6 -u6ch3m -u2ch5n6 -uc6h1r -u2cht -uch5to -uchtr6 -ucht5re -u1chu -u8ch1w -uc2k -uc1k1a -uck5in -u1d -ud4a -u1ei -u6e1la -ue1ne8 -u6ep -u1er -uer1a -ue1re -ue1rer -ue8rer1l -uer5o -u8e1sc -u2est -u8e1v -u1fa -u1fe -u2f1ei -u4f3ent -u8fer1h8 -u2ff -uf1fr6 -uf1l -ufr6 -uf1ra -uf1re -uf1rä -uf1rü -u2fs -uf1s2p2 -uf1st -u2ft -uf2t1s -u1ga -u8ga2b1t -u8ga1d -u6gap -u1ge -uge1b8 -u8gn8 -u1go -ugo3s4 -u1ha -u1he -u1hi -uh1le -u1ho -u6hr -uh1re -u1hu -uh1w -u1hä -u1hö -6ui -ui5en -u1ig -u3i6ns -uin1te -uin8tes -uisc8h -u5i8s2ch. 
-u1j -6uk -u1ke -u1ki -u1kl8 -u8klu -u1k6n8 -u5ky -u1la -u6ld -ul2ds -uld8se -u1le -u6ll -ul1la -ul8lac -ul6lau -ul1le -ul6le6l -ul6lo -u6ln -ul1ni8 -u1lo -ulo6i -u2l1t -ult6a -ult8e -u1lu -ul1v -ul2vr -u1lä -u1lö -u2m1f6 -3umfan -u2m1l -5umlau -u1mo -u8mo8f -u2mp -um1ph -um8pho -u1mu -umu8s -u5mö -u1n1a -un2al -un6at -unau2 -u2n1d -6und. -un1de -un2d1ei -5undein -un1du -un4d3um -un2d1z -3undzw8 -un1dü8 -un8düb -u1ne -une2b -u2n1ec -une2h -u8n3eis -un1f -3unfal -1un1fä -un1ge -5unge1a2 -3un1glü -un2g2s1 -un8gä -u1ni -1u2n2if -un4it -un1kr8 -un8kro -un2k5s -u1no -un1p2 -unpa2 -u6ns -uns2p2 -un1v -unvol4 -unvo6ll5 -u5os. -u1pa -u1pi -u1p2l8 -u1pr6 -u2p4s3t -up1t -up2t1a -u1q -u1ra -ur5a8b1s -ura8d -ur5ah -u6rak -ur3a2l5t -u6rana -u6r5a6ns -u8rap -ur5a6ri -u8ra2t1t -u1re -ur3eig -u8r1g -ur1gr4 -ur8gri -u1ri -ur5i6ns -ur1l -3urlau -u6r1m -ur1me -urmen6 -u2rn -ur1na -ur8nan -u1ro -u2r1s -3ursac -ur8sau -ur1se -ur8sei -ur4s1k2 -u2r1t -3ur5tei -u1ru -ur6u5i6 -u2r1u6r -u1ry -ur2za -ur6zä -ur5ä6m -u5rö -u1rü -urü8c2k3 -u1sa -u1sag -usa4gi -u2s1ar -u2s1au -usc8h -u1sche -u8schec -us8chw -usch5wi -u1se -u2s1ei -use1ke -use8kel -u8s1l -u1sta -u4st3a4b -us3tau -u1su -u2s1uf -u8su2rn -ut1ac -u1tal -uta8m -u1tan -ut1ar -u1tas -ut1au -u1te -u8teic -u4tent -u8ter1f4 -ut2e1ri -u6terin -u4t3hei -ut5ho -ut1hu -u1ti -uti1ne5 -uti6q -u1to -uto5c -u1tr6 -u2ts -ut1sa -ut1s6p2 -ut1st -utstr6 -ut6stro -u1tu -u2tz -utz5w8 -u1u -u1v -u1ve -uve5n -uver1 -uve3r4ä -u1w -u1xe -u5ya -uy5e6 -u1yi -u1ze -u2z1eh -u8zer1h8 -u5ö -u1ße -uße6n -uße1n5e -8van1b2 -6vang -6var1b -va8r8d -v8a6t5a -va1te -va8tei -v4a2t1r6 -2v1b -6v5c -6vd -1ve -6ve5g6 -ver1 -ver5b -verb8l8 -ve2re2 -ve8r1g8 -v2e2ru8 -ve1s -ve2s3p2 -ve3xe -2v1f -2v1g -6v5h -vi6el -vie6w5 -vi1g4 -v2il -vi1le -vi8le8h -vi6ll -vil1le -vil6le. -8vint -vi1ru -vi1tr6 -2v1k -2v1l -2v1m -4v5n -8vo8f -vo2il -voi6le -vo6ll -vol1le -vol8le2n1d -vol8li -v2or1 -vo2re -vo1ri -vo8rin -vo2ro -2v1p -8vra -v6re -2v2s -2v1t -2v1v -4v3w -2v1z -wa2ff -waf5fe8 -wa6g5n8 -1wah -wah8n -wa5la -wa6ld -wal1di -wal8din -wa2l1t -wal6ta -wa2n1d -wan4dr4 -5w2a1re -wa8ru -war4za -1was -w5c -w1d -5wec8h -we1f -we6fl -1weg -we1ge -we8geng -weg5h -weg3l -we2g1r4 -we6hr -weh1re -weh6r5er -5wei1se -weit3r6 -we2l2t -welt3r6 -we6rat -8wer1c -we8rd -5wer1du -wer1f -wer4fl -we6rk -5werk. -wer4ka -wer8ku -we2r1t -wer4ta -wer8te6r1m -we2sp2 -wes1te -we8s4te2n1d -we8str6 -we8stö -we2t1t -wet8ta -wic8h -wi2chs -wich6s5t -1wi1d -wi2dr4 -wie1d -wie1de4 -wieder5 -wi1k6 -wi2m1m -wim6ma -wi2n1d -win4d3r4 -5wi2r1t -wisc8h -wisch5l6 -1wj -6wk -2w1l -8w1n -wo1c -woc8h -wo1che6 -wol6f -wo2r1t -wor6t5r6 -6ws2 -w1s1k2 -6w5t -wu2n1d -wun1de -5wunde. -wun6gr4 -wu1sc -wu2t1 -6w5w -wy5a -wä6r1m -wär1me5 -wä1sc -1xag -x1ak -xa1me -x3a4men -8xa2m1t -x1an -8x1b -x1c -1xe. -x3e4g -1xen -xe1ro -x1erz -1xes -8xf -x1g -8x1h -1xi -8xi1d -xi8so -4xis1te -x1k -6x1l -x1m -8xn -1xo -8x5o6d -8x3p2 -x1r -x1s6 -8x1t -x6tak -x8ter1f4 -x2t1h -1xu -xu1e -x5ul -6x3w -x1z -5ya. -y5an. -y5ank -y1b -y1c -yc8h -y6cha -y4chia -y1d -yen6n -y5e2rn -y1g -y5h -y5in -y1j -y1k2 -y1lak -yl1al -yla8m -y5la1x -y1le -y1lo -y5lu -y8m1n8 -y2m1p2 -y3mu -y1na -yno2d -yn1t -y1on. -y1o4p -y5ou -y1p2o1 -y1pr6 -y8ps -y1r -yr2i3e -y2r1r2 -y1si -y2s5iat -ys8ty1 -y1t -y3w -y1z -yä8m -z5a6b -za1b5l8 -8za6d -1zah -za5is -4z3ak -6z1am -zan1ge -5zange. 
-8z1an5l6 -2z1a1ra -6z5as -z5auf -3zaun -2z1b -6z1c -6z1d -1ze -ze1d -ze1di -ze4di1k -ze1f -4z3e2ff -8zein -zei4ta -zei1te -zei8te2r1s -ze6la -ze1le -ze8lec -ze2l1t -zel8th -4ze2mp -zen1ge -6z5engel -zen1zi -zen8zin -ze8r1g -8zer1gä -z2e1r8i -ze1ro -ze2r1s8 -ze2r1t -zerta8 -zer8tab -zer8tag -8zerz -ze8s1te -zeu6gr4 -2z1ex -2z1f8 -z1g -4z1h -1zi -zi1en -zi5es. -4z3i2mp -zi1na -6z5in1f -6z5in1ni -zi6ns -zin1se -zin6s5er -zin1su -8zinsuf -zist5r6 -zi5th -zi1tr6 -6z1j -2z1k -2z1l -2z1m -6z1n -1zo -z8o1g -zo6gl -4z3oh -zo1on -zo2rn -zor6na8 -4z1p -z5q -6z1r -2z1s8 -2z1t -z4t3e2n1d -z4t3hei -z8thi -1zu -zu3al -zu1b4 -zu1f2 -6z5u6hr -zu1n2a -zu1ne -8zunem -zun1f8 -8zungl -zu1o -zup8fi -zu1s8 -zu1z -2z1v -zw8 -z1wal -5zwec2k -zwei3s -z1wel -z1wer -z6we8r1g -8z5wes -1zwi -zwi1s -6z1wo -1zy -2z1z -zz8a -z1zi -zzi1s -1zä -1zö -6z1öl. -zö1le -1zü -2z1ü2b -ä1a6 -äb1l8 -äc8h -ä1che -ä3chi -ä2chs -äch8sc -äch8sp2 -ä5chu -äc2k -äc1k5a -äd1a -ä1de -äd5era -ä1di -ä6d5ia -ä1e -ä5fa -äf1l -ä2ft -äf2t6s -äg1h -äg3le -ägn8 -ä1gna -ä6g5nan -ä2gs -äg5str6 -ä1he -ä1hi -äh1le -äh5ne -1ähn5l6 -ä6hr -äh1re -äh5ri -äh1ru -ä1hu -äh1w -6äi -ä1isc -äisc8h -ä6i1sche -ä5i2s1m -ä5j -ä1k -ä8l1c -ä1le -ä8lei -ä2ls -äl1sc -älsc8h -äl6schl6 -ä1mi -ämi1e -äm8n8 -ä2m8s -ä5na -ä2n1d -än1de -5änd2e1ru -ä1ne -äne5i8 -äng3l -änk5l8 -ä1no -ä6ns -än6s5c -ä1pa -äp6s5c -3äq -är1c -ä1re -äre8m -ä8r1g -är1ge -5ärge2rn -är6gl -ä1ri -ä6r1m -är1me -3ärmel -ä1ro -ä2r1t -är2t6s5 -ä1ru -är2z1t -3ärz2t1l -ä5rö -äsc8h -ä1sche -ä6s5chen -ä1se -äse6n8s -ä2s1th -äta8b -ä1te -ät2e1ri4 -äter5it -ä6thy -ä1ti -3ä2t1k2 -ä1to -ä2ts -ätsc8h -ät8schl6 -äts1p2 -ä5tu -äub1l8 -äu1e -1äug -äu8ga -ä6u5i -ä1um. -ä1us. -1äuß -ä1z -ö1b -öc8h -ö1che -ö5chi -ö2chs -öchs1te -öch8s2tei -öch8str6 -ö2cht6 -ö1de -5ö6dem -ö2ff -5öf2f1n -ö1he -öh1l8 -ö6hr -öh1re -ö1hu -ö1is -ö1ke -1ö2k2o -1öl. -ö6l1k -öl6k5l8 -ö6l1p -öl8pl8 -ö1mu -ö5na -ö1ni -öni2g6s3 -ö1no -ö5o6t -öpf3l -öp6s5c -ö1re -ö8r1g -ör8g1li -ö1ri -ö2r1t -ör8tr6 -ö1ru -ös1te -5öste2rr -ö1te -ö5th -ö1ti -ö1tu -ö1v -ö1w -öwe8 -ö2z -ü1b6e2 -3ü4ber1 -üb1l8 -ü1b1r -5ü2bu -üc8h -ü1che -ü1chi -ü8ch3l6 -ü2chs -üch6s5c -ü8c2k -üc1k1a -ück5e2r1s -üd1a2 -ü1de -ü6deu -ü1di -üdi8t -ü2d1o4 -ü2d5s6 -ü1ge -üge4l5a -üg1l -üh5a -ü1he -ü8heh -ü6h5e6rk -üh1le -ü6hr -üh1re -üh1ru -ü1hu -üh1w -ü3k -ü1le -ü6ll -ül4l5a -ül8lo -ü6l1p -ül4ps -ü2ls -ül6s5c -ü1lu -ü2n1d -ün8da -ün1f -ün1fe -ün8fei -ünk5l8 -ün8za -ün6zw8 -ü5pi -ü1re -ü8rei -ür1f -ür8fl -ür8fr6 -ü8r1g -ür1ge -ür8geng -ü1ri -ü1ro -ü2r1s -ür8sta -ü1ru -ü1se -üse8n -ü8sta -üs1te -ü8stes -ü3ta -ü1te -ü1ti -ü2t1t -üt8tr6 -ü1tu -ü2tz -üt1ze -üt8zei -ü1v -ß1a8 -5ßa. -ß8as -ß1b8 -ß1c -ß1d -1ße -ß5ec -8ße8g -8ße8h -2ß1ei -8ßem -ß1f8 -ß1g -ß1h -1ßi -ß1k -ß1l -ß1m -ß1n -ß1o -ß1p8 -ß5q -ß1r -ß1s2 -ßst8 -ß1ta -ß1te -ßt3hei -ß1ti -ß5to -ß1tr6 -1ßu8 -6ß5um -ß1v -ß1w -ß1z -2s1ta. -i1sta -i2s1tal -2s1ta1ni -2s1tan. -fe2s1ta -nd1st -nd2s1te -ve2s1te -3s2tec -stec8h -4s5t8e2ch3n6 -3s2teg -3s2teh -3s2tein -3s2teig -3s2te2if -3s2te6ll -3s2telz -a4s3tel -3s2te2m1m -3s2te2mp -ss1te -s3s2ter -t3s2te2rn -3s2teu1e -6s4t3eu1ro -bs2ti -te2s3ti -ve2s1ti -3s2tic -3s2tie1b -3s2tieg -3s2t2if -3s2t2il -3s2tim -3s2tink -3s2ti1tu -a2s1to -gu2s1to -ku2s1to -i1sto -i2s1tol -i2s1tor -ve2s1to -2s1tung -2s7tus -o1stu -o2s1tul -aus3s4 -ens3s4 -gs3s4 -.mis2s1 -s2s1b8 -s1sche -s2s3chen -s2s3d -s2s5ec -2s2s1ei -s2s3f2 -s2s3g2 -s2s3h -s2s3k2 -s2s3l -s2s3m -s2s3n -s2s3p8 -s2s5q -s2s3r -s2s3s2 -sss2t8 -as2s3te -is2s3te -us2s3te -üs2s3te -s2sth -s2st3hei -s2s3ti -s2s1to -s2s1tr6 -s1su -6ss5um -s2s3v -s2s3w -s2s3z -1cker. 
-1cke2r1t -c1ka -1cka1d -1cke. -1ckel -1cken -4ck1ent -1cke1re -1cke2rn -1ck2e1ru -1ckie -1ckig -c1ku -1ckun -ta8s7ta7tur -ta1sta -tast6a1tu -.fli7ck8en7tep7pic8h -.f1li -.f2lic2k -.fli1cken -.fli4ck1ent -.flicken1te -.flicken5tep1pi -.mitt7wo8ch7a8be2n1d -.3mi2t1t -.mit2t1w -.mittwo1c -.mittwoc8h -.mittwocha1be -.ja7cken. -.jac2k -.ja1cken -.ja7cken7ta7sche. -.ja4ck1ent -.jacken1ta -.jackenta1sc -.jackentasc8h -.jackenta1sche -.let7s8cho. -.le2t1s -.letsc8h -.let1scho -.men8s7tru7a7ti7on -.1me -.me6ns -.men1str6 -.menstru2a1ti -.menstruat2io -ni7ve8au7st8a7bi7li7tät. -n2i1v -ni1ve -nive1aus -niveau4sta -niveau4st3a4b -niveausta1bi -niveaustab2il -niveaustabi1li -niveaustabili1tä -niveaustabilität2 -.ur7in8stin2k1t -.u1ri -.ur5i6ns -.u4rinst -.urins1ti -.urin3s2tink -.wach8s8tu7be -.wac8h -.wa2chs -.wach1stu -.stau8be1cken -.1sta -.stau1be -.staubec2k -.stau8b7ent7wick7lung -.stauben2t1w -.staub5entwic -.staubentwic2k -.staubentwick3l8 -.staubentwick1lun diff --git a/dist-packages/wordaxe/wordaxe/dict/hyph_en.dic b/dist-packages/wordaxe/wordaxe/dict/hyph_en.dic deleted file mode 100755 index 29b4cce97..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/hyph_en.dic +++ /dev/null @@ -1,11388 +0,0 @@ -abcdefghijklmnopqrstuvwxyz -.ab4i -.1ab -.ab3ol -.abo2 -.ace4 -.ace2t3 -.a2ch4 -.ac5t2iva -.a2ct -.ac2t1iv -.ad4d1in -.ad1d4 -.ad3di -.ad3e -.a2d3o -.4a2e5d -.aer3i -.aer1 -.a2f3f -.a2f3t -.ag4a -.4a2g5n2 -.a2ir3 -.2ai2 -.al5im -.4al1k -.al3le -.all2 -.a1m5a2r -.2a2m -.ama5te -.am1at -.am2i -.am3pe -.am2p -.am3ph -.a2n1 -.an1a3b -.an2a -.an2a3s -.a4nd2 -.an5da -.an4el -.a4n4en -.an4gl2 -.a4ng -.an4on. -.an1o -.a4n3s -.an2t3a -.an3t2i3 -.4ant4ic -.an4t5o -.a2n2y5 -.a3ph5or -.2ap -.a1pho -.ap4i -.ar5ab -.a2r -.ar5ap -.ar4c2i2 -.ar1c2 -.ar5d -.ar4e -.ar1i4 -.ar4ise -.ar4isi -.ar5sen -.a2r2s2 -.ar2se -.ar4t5icl2 -.art2 -.ar1t4ic -.as1 -.as4q -.as5s1ib -.a4ss2 -.at5a2r -.2a2ta -.ateli4 -.ate2l -.at5omi4se -.a2to -.at2om -.atom1i -.at5om1iz -.2a2t3r2 -.a2t3t4 -.au3b -.2au2 -.au3g4u -.aur4e5 -.au4r -.aus5 -.authen5 -.au3th -.authe2 -.2av4 -.av5era -.aver1 -.bap5ti2s1m4 -.b2a4p1 -.ba2pt2 -.bap2t1is -.barri5c4 -.ba2r -.b2a4r1r4 -.bar3ri -.bas4i -.1bas -.ba5sic -.be3d2i -.b2e -.2b2ed -.be3lo -.1bel -.be5r4a -.ber1 -.be5s1m4 -.be1s2 -.bi4er1 -.b4ie2 -.blaz5o -.2bl2 -.bl2a -.b4l2az2 -.bo3lo -.bo2 -.bo1s5o2m -.bou4n4d -.bo4u2 -.b2oun1 -.bov4 -.3bra5ch -.br4 -.br2e2 -.burn5i -.bu4r -.bur2n2 -.ca3de -.ca4gin -.ca1g2i -.cam5i -.c2a2m -.ca1m3o -.ca2n1 -.can5t2a -.ca5p2itu -.1c2ap -.cap1i -.car4i -.1ca2r -.cas5u1al -.3cas1u3 -.cas2ua2 -.ca4ti -.c1at -.cen5so -.1cen2 -.ce4n2s -.cen5ten1a -.3cent -.cen1te -.cen4t5ri -.cen1tr2 -.cer4i -.cer1 -.2ch4 -.cit4a -.1c2i2 -.cle1m5e4 -.cl2 -.c2le2 -.clima5to -.cli1m -.co5i4t -.1c2o2 -.c2oi -.co3pa -.cop5ro -.co1pr2 -.c4o3r1u -.co3si -.co5ter1 -.c2ot -.coty3le5 -.5coty -.cri5t2i1c2i2 -.cr2 -.crit1ic -.cust2om5 -.1cus -.c4ust2 -.cus1to4 -.3d2av5 -.dea5c2o2 -.dea2 -.de5lec -.d2ele -.del5eg -.de3li -.de3l2i5r -.1d4e1m -.de5nit -.den1i -.de3n1o -.der2 -.de3ra -.de5re4s -.d4er2e -.1de3ri -.de5sc2r2ib -.5de2sc2 -.descr2 -.de5ser1v2 -.deser1 -.de5signe -.des4i -.des2i4g -.desi2gn2 -.de5s2ir -.de5s1is2 -.de5sp2oi -.des1p -.des2po -.determ5i -.1de1t -.deter2 -.de3ve -.de4w -.di4al. -.1d2i1a2 -.dia3s -.di4at -.din4a -.2d1in -.di2o5c -.3d2i1o2 -.1do2 -.do4e -.domest5 -.d2om -.dom2e -.do2me2s -.du4al. -.1du -.d2ua2 -.du1al -.3d4u4c -.d4y2s3 -.2d2y -.eas4t5 -.ea2 -.ech1in5 -.ech3i -.e2ch -.e1c2o3 -.e2c3t -.e1d5em -.2ed -.ede2 -.ed4it. 
-.ed1it -.ed4iti -.eg4 -.ei3d4 -.e2i5r -.e2l3ev3 -.2ele -.el2i -.elu5s4 -.e1lu -.e2m3b -.em5in -.em1p4 -.em5p4y -.en1 -.en5c -.en4d2ed -.2e4nd -.ende2 -.e4n3s -.ent2 -.en5ta -.eo1s5 -.epi1 -.epi3d -.er2a -.er1 -.er5em5 -.2er2e -.er4i4 -.er4o2 -.eros4 -.er2ot3 -.er4ri -.e4r1r4 -.es1 -.esc1al5 -.e2sc2 -.es1ca -.es3p -.es3t2 -.eter2n5 -.eter2 -.eth3e2 -.e2th1 -.eu1 -.eu4r4 -.eval3 -.evol5ut -.e1vo -.evo2l -.ew4 -.ex1 -.ex3a -.eye3 -.fal4le -.1f2a -.fall2 -.far4i -.5fa2r -.fec5un2da -.3fec -.fecun1 -.fecu4nd -.f2e4n4d -.feo2ff5 -.feof2 -.fi2 -.fi5l2i1a2 -.1fi2l -.fil5tr2 -.fi1n5e4ss -.2fin -.fin2e -.fine4s -.f1i4n3g -.fi5n4it -.f2in1i -.fi2s4c5 -.3f2o3c -.1fo -.fran5ch -.1fr2 -.fra2n -.fu5g4a -.3fu -.g2a4m -.ga1m5e2t -.gam2e -.gen4et -.3gen -.g4ene -.ge5neti -.gen5i1a2 -.gen1i -.ge3ro -.ger1 -.glor5i3o2 -.2gl2 -.3glo -.glor1i -.gnost4 -.2gn2 -.g2no -.gno4s -.go3no -.3gos3 -.h1ab2 -.ha2 -.ha5bili -.hab1il -.hama5 -.h2a4m -.han4de2 -.ha2n -.ha4nd -.hast5i -.hast2 -.h4e4i -.he2 -.hem5a -.hi2 -.hi3b -.ho2l -.ho5rol -.hov3 -.hy3lo -.h2y -.hy2l -.ico3s -.2i2c2o2 -.idi2 -.i2d -.2ig3 -.i2g1n2 -.il4i -.i2m5b -.in1 -.2i4n3d -.in3e2 -.2in2i -.2in3o -.in3t -.inve2st5i -.in3v2 -.inve2 -.invest2 -.i4r3r4 -.2ir -.i2s4c2 -.is4li -.i2s1l4 -.is4o -.i1s2o5m -.ka5ro -.ka2r -.ki4e2 -.kin3e -.k1in -.lab4o2 -.l2a -.l1ab -.la4m2e -.l2a2m -.lam5enta -.la3men -.lan5i -.la2n -.lash4e2 -.l4as -.las2h -.le4m -.len5t2i -.le2p -.le1p5r2 -.les5son -.3le4s4s -.les2so -.le5va2n -.2lev -.l3eva -.libra2r5 -.lib1r4 -.lig3a -.1l2ig -.l2i3o2 -.li4o4n1s2 -.l2i4p -.loc3a -.1l2oc -.lo4g2i1a2 -.1l2o1g -.lo3g2i -.lo2p -.loph3 -.lous5i -.lo4u2 -.lov5er1 -.lub3 -.ly1o3 -.2ly -.mac5u -.mal5ad5 -.mal2a -.ma5l1in -.mar5ti -.1ma2r -.mart2 -.m4a2th5 -.me5lo3d2i1o2 -.m2e -.melo4di -.ment4 -.1men -.men5ta -.me5r2i2d -.mer1 -.me5r3in -.met4er2 -.1me2t -.met1e -.mi4e2 -.mi3gr2 -.m2ig -.min5ue -.m2in2u -.mir2k4 -.m2ir -.mis1 -.mi5to -.mi2t -.mo3b2i -.1mo -.mo5le2c4 -.mon3a4 -.m4on -.mor5ti -.m2ort2 -.mu3n2i -.mun1 -.mu3si -.mus2i5c2o2 -.myth3 -.2my -.3myt -.n2a5k2 -.n1a -.nar1i4 -.na2r -.nast4 -.n2as -.nas5ti -.ne2c3t -.ni4c -.n1i -.ni5tro -.n2itr2 -.n2o4c -.no2m3o -.n2om -.nos3t2 -.no5t1ic -.n2ot -.nuc2le5 -.n2u -.5n4uc -.nuc3l2 -.o2b2ed5 -.ob2e -.o1b3e4l -.o2b3l2 -.od4 -.o2e4d5 -.oe5so -.o2f5t -.2oi4 -.ol4d -.om2e2 -.2om -.om5el -.on4ce -.on1c -.o2n4e -.op2i -.op2t5a -.1o2p1t2 -.or1 -.or4at4 -.or1a -.ora5tor1i -.orat1or5 -.ora2to -.or5che2 -.or4ch -.or1c2 -.or3d -.2or2e4 -.or3eo -.or4i -.orn1er4 -.or2n2 -.or2o -.os1 -.osi4 -.4oth5 -.2ot -.out1 -.o4u2 -.ov4 -.pal5i -.para5di4s1 -.1pa2r -.par2ad -.par5af -.par1a5t -.p2a5ta -.pa4t2io2 -.pe2c3t4 -.pecu3 -.3ped3e2 -.2p2ed -.p2e4nd4 -.pen5de2 -.pe2p3t2 -.per3i5n -.p4eri -.per1 -.per3se5c -.pe2r1s2 -.per2se -.pe5titi -.3pet -.pe2ti -.ph2 -.phe5n2o2m -.phe4 -.phen1o -.phon4i -.1pho -.ph2o4n -.p2i2e2 -.pi3l3a -.p2il -.plast4 -.p2l2 -.1pl2a -.pl4as -.plic4 -.plica4 -.plos4 -.po3l2a -.1po -.po5lite -.pol2it -.po2p -.p4op5l2 -.po5si1t2io2 -.3p4os -.pos1it -.pos5si -.po4s1s2 -.pro5bat -.1pr2 -.pr4oba -.pu4r4r4 -.pu4r -.put4te -.pu2t1t4 -.ra5cem -.ran5g2i -.ra2n -.ra4ng -.re3c2a -.r2e -.r2ef5er2e -.re2fe -.re1f -.refer1 -.re5ga2r -.re1i4 -.re5lin -.re1m -.re5o -.res5c2i2 -.re2sc2 -.re5sen -.re5s2po -.re5stat -.rest2 -.r2e5s4t2or2e -.rest1or -.re5st4r2 -.re3t2a -.re5u -.re3w -.rib5a -.2r2ib -.rin4 -.rit2 -.rol4l2a -.roll2 -.r4os3a -.sa2 -.sac5r2 -.sal4i -.sa2l -.sa5l1in -.salt5er1 -.s4al4t -.sal1te -.sanc5 -.sa2n -.s4ap5a -.s3ap -.sa3vo -.s2av -.sci3e2 -.sc2 -.s1c2i2 -.sea3s4 -.se2a2 -.se2ct4 -.sec5to -.se3gr2 
-.sen3t -.se1q -.ser4ie2 -.ser1 -.s2es1 -.sev5era -.3sev -.sever1 -.s2h2 -.si5g2no -.s2i4g -.si2gn2 -.s1is3 -.st4 -.sta2t4o -.stra5to -.str2 -.str4in2g5i -.stri4ng -.su5d4a -.sulph5a2 -.1s2ul -.sulph2 -.sul3t -.tact4i -.ta2ct -.tac5t2ic -.t2a4m -.ta1ma2r5 -.tar5o -.ta2r -.te2ct4 -.tel5a -.te2l -.tell5e -.1tell2 -.te4m -.te5ra5t -.ter1 -.ter4p -.th4 -.tho4 -.thol4 -.ti2 -.til4 -.t2i5n1i -.t1in -.t3i2t4is -.t1or1 -.tran4c -.tr2 -.tra2n -.tri5bal -.t2r2ib -.tri3d -.trin4a -.t4ri5sti -.trist2 -.tro4ph -.tr2op -.tro1ph5o -.tro4v -.tula2r5 -.tu1l2a -.tur1b4 -.tu4r -.tur1i4 -.tu5te -.1tut -.tu3t2o -.4ul4l2 -.ulti5mat -.ultim4a -.ul1tim -.un5ce -.un1 -.un5ch -.u4n3d2 -.under5 -.unde2 -.un3e -.u4n3g -.u1ni3c -.un2i -.un2i3o2 -.u4n3k4 -.u4n5s4 -.un3t4 -.un5u4 -.2up1 -.up3l2 -.ur1a4 -.u4r -.ur5e2th1 -.ur1e2t -.ur2e -.ur4o -.va5l2ed -.1v2ale -.ve2 -.vec5 -.ve5lo -.2vel -.vent5il -.vent2i -.v5er4ie2 -.ver1 -.ver3n2 -.vic5to -.vic2 -.5vi2ct2 -.vi2s -.vis3i -.vi5so -.v2o1c -.1vo -.vo5lut -.vo2l -.wine5s -.wi2 -.win2e -.xy3l -.za5r -.z1a1 -a4a -1ab -2ab. -2aba -ab5ar2e -aba2r -ab4ay4 -2a2b1b2 -ab5ber1 -abb2e -2ab2e4 -ab3erd -aber1 -a5b3e4r1r4 -a3bet -ab1ic -a3b4ie2 -2abin -4a1b2i2o2 -abi5on -ab3it1a -ab4itu -ab3l2a -a2bl2 -abli4 -4abolic -abo2 -abol3i -ab3om -ab3ota -ab2ot -3about -abo4u2 -ab1r4 -2abs. -a4b1s2 -ab1ul -abu4lo -ab3use -ab3usi -2a2by -ac2a -ac5a2bl2 -ac1ab -ac3al -5ac1anth2 -aca2n -ac5ard -a1ca2r -a5c1at -ach5al -a2ch -acha2 -a5ch2in1i -ach1in -ach5i2s1m4 -achro4 -a3c2hr -ach5u4r4 -2a1c2i2 -a4cic -aci4e2r1s2 -aci3er1 -acie2 -ac2i3f4 -4acit -ack5a -ack1 -ac3li -acl2 -4a4co. -a1c2o2 -aco3d -ac5on1r -acon1 -4acos -4aco4u2 -ac1r2 -ac3r2y2 -act5ate -a2ct -ac2ta -act5ile -ac2to -act5o2r2y -act1or -ac2t5r2 -ac5uat -ac2ua2 -a5d2ai2 -a3d2a3v -4adee -ad5en1i -ad4ha2 -a4d1h -ad3ica -a5d2i1f -4adil4 -adi4op -a3d2i1o2 -ad2i4p -adis4i -adi4s1 -a3diti -3adju -ad1j -5admi2t -a2d1m -a2do -4adoe -4ad2oi -ad3ol -a3d4os -ad1ow -ad1r2 -a3dr2a2m4 -4a2du -ad3u1l2a -ad3um -4a2d2y -ae5a2 -ae4cit -ae1c2i2 -ae1c2o3 -4a2ed -aed5i4s1 -ae5g -ae3on -ae5p -aero2d2y5 -aer1 -aer2od -ae4s -ae5si -aes3t2 -aet4a -ae2th4 -aet4or. -aet1or -aev3a -4af. -4afe -af5ta -a2ft -a4fu -ag4ar1i -aga2r -4ageri -ager1 -a5ghe2 -a2gh -a5g2i1a2 -a1g2i -agi4as -4ag2ino -4a2gl2 -agli4 -4a2g1n2 -ag3on1i -agor4a -ag5ot -a2gr2 -ag3ri -a3gru5 -2ah -a1h2a2 -aha2r2 -aha5r1a -a1he2 -a2h4n2 -a5h2oo -2ai2 -4ai. -a2i3a2 -a1ic -aid4a -ai2d -aid5er1 -a2ig2 -ai5gu -ai2l3er1 -ail3o -aim5er1 -aim2e -ain5de2r3s2 -a2i4nd -ainde2 -ainder1 -a4i5nea2 -a3ing. -ai4ng -a2in3i -a2in5o -aint5er1 -ain1te -air5a -a2ir -air5p -ai2r3s2 -ais1i -a5i2s1m4 -2a1j -a4ju -2ak -akel4 -ak5u -al5a2bl2 -al2a -al1ab -ala2ct4 -al4ac -a1l4ae -al5ais -al2ai2 -ala3ma -al2a2m -al5ance -ala2n -al3at -a5l2av -al2c3at -al1c2 -al3ch -ald5ri -aldr2 -2ale -a3lec -aleg4 -ale5ma -al5end4e2 -al2e4nd -a1leo -a2let -al3ib1r4 -ali4c2i2 -al5i4c5s4 -al1i2d -al3i1f -5a1l2ig -al1in -a5l2in1i -al2in5o -al5ipe -al2ip -al5ip2ot -ali3po -4alis. -al1is -4al2i1u2 -4alk -alk5ie2 -al4l1ab -all2 -al1l2a -al4lag -alli5a2n -al1li -all2i1a2 -al1l2ig4 -al4lis2h -all1is -a5loe -al3o1gr2 -a1l2o1g -a3l2om -a3l2oo -al1or -al4orim -alor1i -alos4 -a4lo4u2 -al3ous -a5low -al5pen -al3ph2 -al5tati -al3tie2 -alu3b -al5u4ed -a4lue -al3ues -a5lumn2i1a2 -alu4m1n2 -alumn1i -al1va -al5ver1 -alv5u -2a2ly4 -a5lyn1 -2a2m -a5mad -ama4g -ama4n5d -a1ma2n -a5marin4e -a1ma2r -amar1i -a3mas. 
-am1at -a5m4at1i2c -am5a2tu -am4bin -a2m1b -amb2i -3ambu -am5elo -am2e -a3men -am2e4n4d -am3era -amer1 -am5erl -am1i -am1i2c -am5ica -am2i1c5r2 -3ami2d -a3mili -am2il -am5i2ly -amin2i4f -am2in1i -am5in1iz -am4inos4 -am2ino -a5m2is. -a4mium. -a3m2i1u2 -ami3um -a3m4on -a1mo -amor5a -am2ort3 -am5ose -am2p -am5p4er3i -amper1 -amph2i5g -amp3li -amp2l2 -ampo5l -am3po -am3ul -amyl5 -a2my -a2n -an2a -a5nadi4 -an3ae -an3age -an2a5k2 -an3ali -an3ar1c2 -ana2r -a5nast2 -an2as -an4con1 -an1c2o2 -an3d4at -a4nd -an2da -and5au2 -and5eer1 -ande2 -an5de4l -an5d2i1f -and5ist2 -andi4s1 -an5dit -an4don1i -an1do -an4ea2 -an5eer1 -an3ell2 -anel5li -an3eu -a2n3ga2n -a4ng -ang2a -angov4 -an1go -an4gu4r -an1gu -4an1h2 -an3ic -an1i -an2i3f -an5i3fo -4an2i2g -an5ion -an2io2 -anis5te2 -anist2 -4anity -4an2i1u2 -an5no -an1n4 -4an2ny2 -an1o -an2oe -an3o1ma -an2om -anor3 -an2os -an5ot -a4n2s -an3s2c2 -an4s2c2o2 -ans3il -an2si -an4su4r -an3su -an2t2a -ant5a2bl2 -an2t1ab -an3t1al -an5t2a2m -an2te -1anth2 -an4thi -3an1t2h2r -4ant1ic -ant2i -an4tie2 -an4t3i4ng -ant1in -an2t4iv -an4to2n4e -an1t2o -an1t4r2 -an4tus -an1tu -an5tym -an2ty -an3ul -an2u -an3um. -an5u4m1s -a3nu4r -a5nut -a2n2y2 -an5y1a2 -a5ny1i -2ao -aol3i -5aow -2ap -4ap. -4apa -a1pac -ap3al -ap5aro -a1pa2r -ape5li -a5peu -aph5em -aphe4 -aph3i -aph5ol -a1pho -a3phy2l3 -aph2y -ap1i -ap5icu -ap3in -ap4in4e -a5p2ir -a3pla2n -ap2l2 -a1pl2a -ap5li -apo5str2 -a1po -a3p4os -apost2 -ap4o3th -ap2ot -a2pr2 -ap5ron -4a2ps2 -apt5at -a2pt2 -apu5la2r -apu1l2a -a5p4un1 -a4q -a5q2ui2 -aqu4 -a2r -4arabi -ar1ab -ar2a5bo2 -aract4i -ara2ct -ara2g -ar3age -ar4a2g2e4d -ar5a1g2i -ar3ago -a3r2a1j -ar3all2 -ar2a3m -ara4n4g -ara2n -aran5te -aran2t -ar5apa -ar2ap -ar1at -a3r2au2 -ar2a3v -ar3ba -ar1b -arb5et -ar2b2e -ar4b2i2d -arb2i -ar4bl2 -arb3li -ar4bul -ar5chet1 -ar1c2 -ar2ch -arche2 -ar1ch5o -ar5din1a -ar2d2in -ar4do2ne -ar1do -ar3en -ar2e -ar2e4n5d -ar5e2t2t4 -ar3e1v5 -ar5gh -ar1g2 -ar3gu -ar3h2 -ar1i -ar5i2ff -ar2i1f -ar4ill2 -a5r4i5net -arin4e -ar5in1i -a5rishi -aris2h -arm3er1 -arm2e -ar5mi2t -ar3nal -ar2n2 -arn1a -ar3nis -arn1i -ar3od -ar5o2i4d3 -a4r2oi -aro4mas -ar2om -aro1ma -aro4n -a5roti -ar2ot -a5ro4uc -aro4u2 -a4r3o4x -arp5e2r1s2 -arpe2 -arper1 -ar4pu -2a4r1r4 -ar2rh2 -a2r2s2 -ar2s5a2l -ar3so -art5at -art2 -ar2th -arth4e2 -ar1t2h3r -ar5t1iz -2aru -ar3um -ar5un4 -a3ry1o2 -a2r2y -a5ry2t -ar5z -as1a -as4af -asa2n2 -2asc2 -as5con1 -as2c2o2 -as5c2ot -as2cr2 -as2e -as3e2ct -4a2s2ed -ase2p4 -ash5ay -as2h -asha2 -ash5i4l -as5i2ly -a2s3in -a5s2io4 -a3s1it -a4s5iv -ask5er1 -as2k2 -aske2 -aski4 -as4l2a -a2sl4 -as4lo -2aso -as5o2ch -as2oc -a4s4o2n2ed -as4o2ne -as5or -as3ph2 -a4ss2 -assa5g2i -as1sa -assa4g -ass5i2bl2 -as4s1ib -as4sil -as3s2it5 -2asta -ast2 -as4tat -as4t2i3a2 -as3t1is -as4tit -4asto2 -as3tra -astr2 -as4tri -as1u -as4un1 -as5u4r -2a2ta -4atabi -a2t1ab -a5tal1is -at2a2m4 -at2a3p -atar3a -ata2r -ata3s -ata3t4 -at3e2au3 -atea2 -at3e2ch -at5eer1 -a5tel. -ate2l -at2e5le -at5enat -aten1a -at3ent -4ater1 -at3era -at5er1n3is -ater2n2 -atern1i -at5ern3iz -4ate4ss -a2t2es -at5et -4a2th -ath3a4 -a3then -athe2 -ath5er3in -at4her1 -ath5ero -ath5ete -athet1 -ath3i -ath3od -a5th2o4n -a1t2h5r -4a3t2i2a2 -at1i2c -at5icis -at2i1c2i2 -ati5cit -at5ic1iz -a2t2i1f2 -a4t1i4l -a4tim -a2t3in -4atin1a -at5i4ng -4at4is. 
-a2t1is -at1it -atit3u -atitud5i -4a3t2i1u2 -at4ivi -a2t1iv -a5tiv1iz -a2to -5at5od -4a1t2o1g -2atol -4aton -a3t2oo -a4to2ps2 -a5t4oria2n -at1or -ator1i -ator2i1a2 -a4to2r2y -atos4 -a5t2oz -2a2tr2 -at3ra -a4tr2e -5at5re4s4s -at1ri -a3t2r2ic5u -at3ron -at5ro4u2 -at4tag -a2t1t4 -atta4 -2a2tu -at1ul -atu4m -at3ur1a -atu4r -at3ur1g2 -4a2ty -2au2 -4au. -au1b5i -4a4uc -au5cer1 -au1c3o2 -au4d5er1 -aud4e -audic4 -aul3i -aul4t -aul5t4ed -aul1te -ault5er1 -ault5i -au3ma -aun2 -aun5ch4ie2 -aun2ch -au4n3d -aun4dr2e -aundr2 -au5reo -au4r -aur2e -aur4o -au5ror -4aus. -aus5er1 -aus5p -au4s4t4ed -a4ust2 -auste2 -aut3a2r -aut3er1 -au3th -2av -a2v4ab -ava4g -av3a4ge -ava5l2a -av5alr4 -av5ant -ava2n -av5a2r -avas3 -av3e4nd -av3er2n2 -aver1 -av3ig -av4i1ol4 -a3v2i1o2 -av1is -aw5er. -awer1 -aw5e2r1s2 -aw1i2 -aw5n2ie2 -awn1i -aw5y2 -a4x -ax2i2d -ax1i -4ay -ay5l2a -ay3m -ayn4 -a4y2s2 -ay5si -ay5sta -ays1t2 -ayth4 -2az2 -az3a2r -az1a1 -aze4 -az5ee -azy1g4 -azz4l -azz2 -2ba. -ba5b2ir4 -b1ab -3back1 -baen4 -bag4a -5b2ah -b2a4i2 -bal3a -balm5i -balm2 -ba5lon -bal5u -bam4a -b2a2m -ban4a -ba2n -ba5na2n -b4ane -5ba4ng -b4aniti -ban1i -b4a4n2s -b2a4p1 -5bar1b -ba2r -bar4d -bardi4 -bar4n2 -ba5r2om -bar3o4n -5ba2r2s2 -1bas -bas4te2 -bast2 -b4a4th4 -3bat1i2c -ba5t2io2 -b4at5on -ba2to -battle5 -ba2t1t4 -bat2tl -2b1b2 -b4b2a2ta -b3bli -b2bl2 -b4bo2n4e -bbo2 -b3bon -b1c2 -bcord4 -b1c2o2 -2b1d -bdeac5 -bdea2 -bde4b -b1di4v2 -b2e -4be. -3bea2 -4beas -be3c2a -3becu -2b2ed -be3da -bed5el -bede2 -bed2i -be4do -be5dra -bedr2 -be4du -5bee -3be1f -be3go -be5gr2 -be3gu -1bel -be3l2a -2b2ele -be3l1it -bel4t -be3m -b2e4n4d -ben2d5a -bend5er1 -bende2 -b4e1ne -be5n2i2g -ben1i -b4e5n2u -4beo -be3q -2b2er2e -ber1 -berg2a5m -ber1g2 -berl4 -5be4r1r4 -be2r5s2 -b5er2t1in -bert2 -be1s2 -2b4es. -be3sl4 -be3tr2 -be3w -2b1f -b1f2a4 -4b1h -b4ha2 -2bi. -1b2i1a2 -bi4b1 -bi1cen5 -b4ice -3b2i2d -bid5i -b4ie2 -bi4e2r1s2 -bi1er1 -b2i1f4 -bi4fid. -bifi4d -bi5ga -b2ig -bigu3 -b1il -b2ile -5biles -3b2ill2 -4bim -bi1me2t5 -bim2e -5bin1a -5b2i4n4d -bind3e2 -bin5et -b2in5i4 -1b2i2o2 -b4i1o3l -bi2o5m -bi3o4u2 -b2ip4 -b2i5q -b2ir4 -bi3r2e4 -bi5rus -b2is -5bi2s1m4 -bis4o -bi5s2ul5 -3bit2u1a2 -4bity -bi5ve -b1j -4b5k4 -2bl2 -5bl4ac -bl2a -blag4 -b3la2n -5blast2 -bl4as -bl2a5tu -blem5at -3b2ler1 -5blesp -4b3l2ik2 -blim3a -bl2i3o2 -bl2i2q -b3l1is -4b2ly -2b1m -bment4 -bm2e -b1men -bmi4 -4b1n -bo2 -4bo. -3b2oa2 -bo5a2m -5bob -bod5i -bo5h2 -2bo2i4d -b2oi -4boke -bol4e -4bo2l2ed -bol3i -bol4t -3bon -bon4c -bo2n4e -bon4ie2 -bon1i -bon3i4f -bon4sp -bo4n1s2 -1b2oo -b3orat -bor1a -bor3d -bor5ee -b2or2e -bor5et -3bor1i -bor5ic -bor5i3o2 -bor4n2 -bot3a2n -b2ot -5boti -boun5t2i -bo4u2 -b2oun1 -3bou4r -bous4 -bow2 -bow3s4 -4boxy -bo4x -5b2oy -br4 -3bra2ch -4bral -br2a2m4 -b2ra2n -bra4n4d -4b4re. -br2e -b4reas -brea4 -4b2res -brev5et -bre1v -b2ri2d -5brie1f -brie2 -bri4ng5 -bri4os -br2i3o2 -b5rist2 -b4r2oa2 -bro4ma -br2om -bros4 -brum4 -4bry. -b2r2y -4b1s2 -b3sc2 -bscon4 -bs2c2o2 -bsen4 -bserv5a2n -bser1 -bser1v2 -b5si -b2sin4 -bso2 -bsol3e -bs2ol -bso3lu -b4stac -bst2 -bstu1pe5 -bs1tu -bst2up -2b1t -b5t1let -b2tl -4bu. -5bub -buf5fer1 -buf2 -bu2ff -buf1fe -b4ul2i -b4ulos -bu1lo -bun2 -bu4n4a4 -b5u5nat -bunt4 -bur3e -bu4r -bur4ri -bu4r1r4 -busi4e2 -bu4ss2 -bus5si -3b4ust2 -bu5ta2r -b3ute -b5ut1in -but2i -3bu3t2io2 -bu2t4iv -b5ut5o -b1v -4b3w -2by -4by. -3by1i -b4y2s4 -5byt -2ca. 
-c2ab5in -c1ab -c4ace -ca1c2o3 -cad4r2 -5caf -ca3go -5c2ai2 -5c2ak -c1al -c4al2a -ca5la1ma2n -cala3ma -cal2a2m -cal5a2r -3cal1c2 -ca5le1f -c2ale -cal2l5in -call2 -cal1li -cal4m2 -c2a3ly4 -ca3ma -c2a2m -cam4i -ca5na2r -ca2n -can2a -c2an4e -c4an1o -ca3n2oe -can5ta2r -can2t2a -can5t4ed -can2te -c4an4t1ic -cant2i -can4t4r2 -5c2ao -1c2ap -ca5p2il -cap1i -ca2pt4 -cap3ti -cap3u -1ca2r -ca3ra5c -car5am2e -car2a3m -ca3ree -car2e -ca3r4i3c -car1i -car3i1f -car5m -car3n1i -car2n2 -car3ol -car5o4n -car5oo4 -ca3ro4u2 -car4v2 -cas2e5 -cashi4 -cas2h -3ca4s3s2 -cas5t2ig -cast2 -3cas1u3 -c1at -c4at. -c2a2tc2 -c4at2om -ca2to -c2a3t2r2 -c4a4t1s -c2a2t4u -3c2au2 -caulk4i -cav3i4l -c2av -3c4ay -c1c4 -ccen1t5r2 -c1cen2 -c3cent -cces4sa -c5ce4ss -c3ch -cci3d4 -c1c2i2 -cc2ip4 -cc2le3 -ccl2 -4ce. -4c4e1ab -cea2 -cea2n3 -3ceas -ce4c2i2 -2c2ed -5ceda -ce3da2r -3cede2 -3cedi -4ce1f -ce5g -3ce2iv -cel3ai2 -cel2a -cel5ib5 -5cell2 -cel5lin -cel1li -celo4 -ce5l2om -4ce2ly -2cem -ce4me2t -ce1m2e4 -3cemi -ce4mo -1cen2 -5cenc -cen5c2i2 -cen5d2ed -c2e4nd -cende2 -cend5en -cend5er1 -cen3i -2cen1n4 -3cent -cent4a -cen5t4ed -cen1te -cen5ter. -center1 -cen5te2r1s2 -cen5t2es -1cep -cept3a -ce2pt2 -cep5t4ic -3cera -cer1 -cer4b2i -cer1b -3c2erd -ce3rem -c2er2e -5cer2n2 -5ce4ss -cest5o -cest2 -ces5t4r2 -ce2t -cew4 -2ch -4ch. -4ch1ab -cha2 -3chae -3ch2ai2 -cham5per1 -ch2a4m -cham2p -chan5g2i -cha4n2g -cha2n -ch4a3pa -ch2ap -chec4 -che2 -4ch2ed -3chee -3chem -che3o2l -ch1er1 -ch4eri -5cher3in -ch4erl -4ches -3chete -chet1 -ch5eu2 -che5va -che4v4 -3chew -ch5ex -5c2hi. -3ch2i1a2 -3ch2i2c2o2 -ch1ic -ch3i2ly -chi4l -ch4in. -ch1in -ch3in1n4 -3ch2io2 -5ch2i2p -ch2izz4 -ch1iz -ch5k -5chlor -c4hl -4c2h1m -1cho -ch2o3a2 -5ch2oc -4ch2oi -ch5o2i4d -3chor -4cho2r4ed -ch2or2e -chor5ol -4choso -3ch2ot -4choti -ch5ous -ch2o4u2 -chow5 -3c2hr -chu4r4 -3chut -5chy1d4 -ch2y -3chy2l -3chy2m -1c2i2 -4ci. -4ciac -c2i1a2 -ci2a4m -ci3ca -4ci4d1s2 -ci2d -4cie. -cie2 -ci3er1 -ci3es2t2 -c2i5et -c2i3f -cifi4 -4c2ig -ci3ga -c3iga2r5 -3cil -cil5lin -cill2 -cil1li -2cim -cim3a -ci3m2e -5ci1men -4cin3ab -cin1a -4c2i4nd -c4ine5a2 -cine5mat -ci5ne4ss -cine4s -4cint -c4i3ol -c2io2 -ci5om -ci4po -c2ip -cisi4 -c2it3r2 -ck1 -cka2r5 -cka5t -c4ke -ck5i1f -ck4sc2 -c4k1s -cl2 -cla5r2i1f -cl2a -cla2r -clar1i -3cl4as -c2le2 -2c4le. -c5lec -clemat4 -c2lev3 -cli1m -c3li4ng -cl2i2q -c1lo4q -c4l4o1tr2 -cl2ot -c4lue4 -cl2yp5 -c2ly -5cl4y2s -cn2 -c3n1i -1c2o2 -4co. -3c2oa2 -c4o5ba -3c2oc -co3c2i2 -co5cu -co3dic -co3d2i1f -4co2d2y -3coe -co5et -co3gr2 -c2o1g -4c3o2i4d -c2oi -co3inc -4col. -col3a -co3l2o1g -co1lo -5col1o4u2 -co5ly -co5mas -c2om -co1ma -co4m2e -co3mo4 -com1p4 -con1 -con4ati4 -con1a -con4ch -con1c -con3d5er1 -co4nd -conde2 -con4ey -co2ne -con4ie2 -con1i -co4n3s2 -c2on3t -conta5d -3c2oo -coo2p4 -co3or -cop4e -co3ph -c4o5p2l2 -co3po -c1o2p4t2 -2cor1a -cor5d2ed -cord5er1 -4co2r4ed -c2or2e -co3rel -3cor2n2 -4coro -co5rol -5c2ort2 -3cos. -c4ost3a -cost2 -cost5er1 -coste2 -co5ta -c2ot -3c4o3tr2 -5coty -co4us5t2 -co4u2 -cov1 -co3va -cow5a -c2oz4 -co5z1i -c1q -cr2 -5craf -craft5i -cra2ft -c4ra2n -5cran1i -cr4a5n2i1u2 -cras3t2 -cras2 -cra4te -c2r2e -4crea2n -crea4 -cre3at -cre4p3 -5creti -cre4t2o -cret5or -cri3l -cron4 -crost4 -4cro4u2 -5c4rus -c2r2y2 -crym3 -cry1o3 -4c5s4 -csim5 -2ct -c2ta -c3tac -ctac5u -c5ta5g -ct1a2n -ct5ant -c5tar2i1a2 -cta2r -ctar1i -c3ta2to -c1te -c4tea2 -c2t5ee -c4tent -cter4i1a2 -c1teri -cter1 -c2t5es -ct5et -ct2ic -c5tic2i1a2 -ct2i1c2i2 -c4ti4c5s4 -ctifi4e2 -c1t2i1f2 -c3tim -ct4in. 
-ct1in -ct4in1a -ct5i4ng -c3t2in1i -c5t2in5o -c5t2io2 -c3t2is -c3tit -c4titu -c4tity -ct5ive -c2t1iv -ct4iv1i2t -ct5o1lo -c1t2om -c3ton -c5toris -ct1or -ctor1i -c5tor1iz -c1tr2 -c2tr2e -ct2r2o5t -c1tu -c2tum -c1ty -cub3at -cuba4 -c4uf2 -cu5ity -c2ui2 -cu4l5ab -cu1l2a -c2ul2i -cul2l5er1 -c4ull2 -cul2l5in -cul1li -1c2ult -cu4mi -5cu4n3a4 -cun1 -cun4e -5cun2i -5cuol -cu5pa -c2up -cu3pi -c3up2l2 -1cu4r -cur4er1 -cur2e -cur5ial -cur1i -cur2i1a2 -4cur4o -1cus -cus5a -c3u2t1iv -cut2i -c3utr2 -5cuu4 -cu5v2 -2cy. -cy4b2i -c4y1b -1cyc -cyl3 -cy4m -cy5no -cyn1 -c4y2s4 -cys5to -cys1t2 -cy4t -cz2 -4da. -d4ab1r4 -d1ab -1d2ac -da2ch4 -d5ache2 -3da2ct -d1ag -d4a4g2i -d4ale -d4al1g2 -dal5ler1 -dall2 -dam5a -d2a2m -3dam2e -d3am1i -da5mu -3da4ng -da2n -d1an4t -d3ap -d3ard -da2r -5darm -3d4as2 -d2ast5a -dast2 -d1at -da2t1iv4 -d2a2t4u -dau2gh3 -d2au2 -daun5te -daun2 -3d2av -d3b -d3c4 -d1d4 -d4d4er2e -dder1 -d3di -d3d2ler1 -d2dl4 -d3dli -d3dy1i -d2d2y -2de. -de1a2c3t -dea2 -de5aw -de4b2i -de1b -deb5it -3dec -de5cant -dec2a -deca2n -de4cil -de1c2i2 -de1cr2 -4de2ct -ded3i -d2ed -def2or5e -de1f -de1fo -de4fy. -de3g -de4gu -de3io2 -5d4e3is -de3lat -del2a -de1li4e2 -del5ler1 -dell2 -del5li -de5lo -1d4em -4de4mie2 -4dem4is -d4em4o4n -de1mo -de4mo4n1s2 -de3mor -de4mos -4de2my -de1n2a -d2e4n4d -4d4ene -d3en1h2 -den2i4e2 -den1i -dens5a -de4n2s -dens5er1 -den5tit -dent2i -de3od -deo3l -deon2 -de3ont5 -de1p -depen4 -deposi4 -de1po -de3p4os -de2p4u -d3eq -derac4 -der1 -de3r2ai2 -d4er2e -4d4e2r4ed -de5reg -3der3er1 -1deri -der3k4 -3derm -der4mi -der5min -5derne -der2n2 -3dero4 -der5os -de2r3s2 -5d2eru -4d4es. -de3sa -5de2sc2 -des4ca -de5sc1al -de3sec -des4i -de3s4i2d -des5ig1n1a -des2i4g -desi2gn2 -des1p -des5p4o2n -des2po -de3sq -d3e4st. -dest2 -de2s3ti -1de1t -de3t2es -de5th1 -de2ti -dev3i4l -de3vis -de3vi2t -de4v2oi -de1vo -devol5u -devo2l -3dex -2d5f -dfol4 -d1fo -d2g -dg4a -d1gel4 -d4gen -d3gr2 -4d1h -dh2ot4 -d4hu -4di. -1d2i1a2 -di2ad -3dia2r -di5at5om -di3at -dia2to -4d1ib -d1ic. -dic5a2m -d4i4ce -d4i3ch -d5icl2 -dic5ol -d2i2c2o2 -1di2ct -dic5tat -dic2ta -dic4te -5dicul -d2icu -d5i1cu4r -1di2d -di4e2r1s2 -die2 -di1er1 -3di3ev -d4i3fo -d2i1f -dig3al -d2ig -di3g2a2m -dil4 -5dill2 -dilo4 -d4i3lu -di5mer1 -dim2e -di1me2t4 -di1m1i -2d1in -din4e -d4in5g2i -di4ng -d4i5nos -d2ino -3d2i1o2 -di2o4c -di4ol2a -d4i1ol -di2p5t2 -d2ip -3dir2e -d2ir -di3r1i -4d5iro -di4s1 -d4i2s3c2 -d4is3en3 -3d2is2i1a2 -3d2i4s1s -d4it4as -dit1a -d4iter1 -dithe4 -d2ith -d3ito -dit1or3 -2dity -1d2i1u2 -1di1v2 -di4val -d2iva -di5vin2e -di2v1in -dix4i -d2ix -d1j -2dl4 -d1l2a -5dle1f -5dlest2 -3dlew -dlin4 -d1lo -d5lu -2d1m -4d1n2 -1do -4do. -d4ob -do4c3u -d2oc -do2g4a -d2o1g -do4j -d4ol. -dol3en -do5l4ine -dol5it -do4lon -do1lo -d4o4ls -5dom. -d2om -do1ma2n4 -do1ma -domin5 -dom1i -dom5ino -dom5i2t -do5mo -don4at -don1a -4do2ny2 -3d2oo -d2or -4dor. -d2or4m -d2ort4 -d4os -do5sim -dossi4 -do4ss -dot1a -d2ot -dot4t1in -do2t1t4 -dot3ti -2dous -do4u2 -d4own -3do4x -d1p -dr2 -d5rail -dr2ai2 -d3ral -3dr2a2m -dra2n4 -d4ras2 -drast4 -3drel -dr2e -dres4 -dres2s5o -dre4ss -dri4e2 -d4r2i1f -dr2i4g3 -d4r2om -dro1pho4 -dr2op -dru4n2k3 -drun1 -4d1s2 -d5sl4 -d2s3m4 -ds4mi -d4sw2 -dt4 -dt5ho -1du -2du. -du1at -d2ua2 -3d4uc -du4ch5 -duci5a2n -du1c2i2 -duc2i1a2 -du4c2o2 -du5eli -du5ell2 -du5en -du5e2t2t4 -due4t -du5in -d2ui2 -dul3c2 -d3ule -d4ul4l2 -dum4b2e -du2m1b -du4n4a4 -dun1 -d5un4c -d2u2p -du3p2l2 -5dur1o -du4r -d5use -dust5er1 -d4ust2 -duste2 -du3u4 -d1v -dver2 -dvert3 -dvoc5at -d1vo -dv2oc -dvoc2a -2d1w -dwell3 -2d2y -dy4ad. 
-dy1a2 -d1y5a2r -5dy4e -5dyk -dyl2 -dyll3 -5dymi -3dyn1 -dys3p -d4y2s -d3zo -ea2 -4e1ab -e1a2ct -eac4te -ea5cu -e5ad1d4 -ead3er1 -ead1i -ead3li -ea2dl4 -ea4g -e2ak1 -eal3a -ea2l3er1 -e2ale -ea3l2o1g -eam4bl2 -e2a2m -ea2m1b -eam3er1 -eam2e -ean5i -ea2n -e2ap2 -eap5er1 -e3a4p1p2 -ear3a -ea2r -ear3er1 -ear2e -ear4li -e5a4r2r4 -ear4te -eart2 -earth5i -ear2th -eas5er1 -eas2e -ea4son1i -e2aso -e1a4s1s2 -eassem4 -eas4t2 -east5i -eat5en1i -e4at3er1 -eat5ie2 -e3a2t2i1f2 -eat1it4 -eat4it3u -e3at1ri -e2a2tr2 -e4a2tu -e2au3 -eav5i -e2av -eavi4e2 -eav5o4u2 -ea1vo -eaz5i -e2az2 -e1b -eba2r4 -e2b2b2 -eb2e4 -e4bel. -e1bel -e4be4ls -e2ben -eb5et -eb2i -e5b1il -e4bin -e4b2is -e4bl2 -e4bos -ebo2 -ebo1t3o -eb2ot -e2br4 -eb1ra -e2b2t -e4b4uc -ebus5i -ec2a -ec3ade -ecad5en -ec2al5e -ec1al -e5c2a2m -e4ca1po -e1c2ap -ec3at -ec5a2th -e1ce -ecen2t5o -e1cen2 -e3cent -ech3i -e2ch -e4cib -e1c2i2 -ec2i4f -ecip5i -ec2ip -e1cl2 -ec3l2ip -econ4s2c2 -e1c2o2 -econ1 -eco4n3s2 -econstit5 -econ3s2t2 -e2c3or1a -e4c5oro -ec3rat -ecr2 -e4c5rea2n -ec2r2e -ecrea4 -e4crem -ec1ro -ect5ati -e2ct -ec2ta -ec4ter1 -ec1te -ect2i4c -ec4tit -ec4t5us -ec1tu -ec1ul -e5c2ul2i -2ed -e5da4n2s -eda2n -e2d1at -ede2 -2e4d2ed -e5de1h2 -e4d2ele -edes3t2 -ede3te -e1de1t -edeter5 -e3dev -e5dew -ed4g -edi4a4ls -e1d2i1a2 -ed5i4c1al -ed5i4c5s4 -ediges4 -ed2ig -edi1ge -ed5i1gr2 -ed3im2e -ed1it -e1di2v2 -ediv5i2d -ed3li -e2dl4 -ed2or4 -e1do -e4do4x -ed1ro -edr2 -edu5cer1 -e1du -e3d4uc -e2dul -ed3u1lo -e4d5u4r -ee4ce -eed3er1 -e2ed -eede2 -ee4do -ee2f -ee5g -ee1i -ee2l1i -ee2m -eem5er1 -ee1m2e4 -eem3i -eep1 -ee4pa -eer4in4e -eer1 -eer3in -eesi4 -ee3to -e1f -efac2t5o -e1f2a -efa2ct2 -efal4 -ef5er3ee -efer1 -ef2er2e -ef5ini4te -e2fin -ef2in1i -e4fite -ef4l2 -efor5est2 -e1fo -ef2or2e -2e3fu -e4fug -efut5a -e1gel3 -eg2i5a2 -e1g2i -e4gib -e3gl2a -e2gl2 -eg3le -eg4mi -e2g1m4 -eg5n1ab -e2gn2 -eg1n1a -e5g4on -e2gr2 -e5gu4r -e1h2 -e5ho -e2h5s -eh2y2 -ehyd5r2 -ehy1d4 -ei2d4 -5ei1do -4e2i1f -e2ig2 -e5ignit -ei2gn2 -eig1n1i -e4in. -e3inc -e2ine -e1i4ng -e2in5i -e4ins. -ei4n1s2 -e2i4p4 -eir3o -e2ir -4eis -eis3i -eit5er1 -e2ith4 -e2iv -eiv3er1 -e2iz -e1j -ejudic4 -eju1di -ek3en -ek5is4 -ek4l2 -e4l4ac -el2a -e5lad -el5age -el2a2m4 -el5anc -ela2n -elast3 -el4as -e4la2t2es -ela4te -el5at3ive -ela2t1iv -elch5er1 -el1c2 -elch4e2 -el2ch -eld3er1 -2ele -elea5g -elea2 -4e4l2ed -el5en1i -el3en3o -ele3o -ele5ph1 -e2l1er1 -e1les -e5le4s4s -e4leste2 -elest2 -el3et3o -e1let -el3ev3a -e2lev -ele3vi -el5ex -e4l3ica4 -e1lie2 -eli4e2r1s2 -eli1er1 -e3lim -el3i4ng -eli3on -el2io2 -e4l1i4s -el2i2t4t4 -el1it -e3l4iv -el4l1ab -ell2 -el1l2a -ell5iz -el1li -e3l2oa2 -e3l2oc -elo5c2a -eloc3u -elo4di -e2l2o1g -elom5ate -el2om -elo1ma -elo3mat -el5op. -el5o2ps2 -elp5in -el3so -e4ls -el5tie2 -e1lu -elu4m -elus4 -elv4 -e5ly1i -e2ly -3elyt -e2m3ago4 -em3an3a -e1ma2n -e1ma2r4 -emarc5a -emar1c2 -em5at1iz -em2at5ol -ema2to -em5b2i -e2m1b -e1m2e4 -e4mee -e4mel -e3me2m -e4m3era -emer1 -em5ero -emet4e -e1me2t -em4icis -em1ic -emi1c2i2 -e4mie2 -e2m2ig -emig5ra -emi2gr2 -em3in1a -em5i4ng -e3m2i3o2 -em3i2s1m4 -e4mit1a -emi2t -e4m2i1u2 -em4mae -e2m1m2 -4emnit -e4m1n2 -emn1i -emo3b2i -e1mo -emo2d4u -emod1 -e2m2o1g -e4m2oi -em3o1lo -em5o1m -4em4on -e3mon1i -emon5ol -emo4no -e2mor -em5oris -emor1i -em3o4r1r4 -e4mot1ic -em2ot -e5m2oz -em1pa5r -em1p -em3pa -empara5 -em5pes -4emp4li. -emp2l2 -em4pr2e -em1pr2 -em3um -e5mut -en3a2c -en1a -e4nal -en3a1m3o -e1n2a2m -en4an1n4 -ena2n -e2n3a2r -en3as. 
-en2as -ena5tur2e -en2a2tu -enatu4r -3en1cep -en4cile -en1c2i2 -en3cil -en2c1t4 -2e4nd -en4d5al -en2da -en4de2dl4 -ende2 -en1d2ed -end5rit -endr2 -4ene -e2n2e5d -en3ee -e5nelle -enell2 -e5ne2p -e2n1er1 -e5nereo -en2er2e -ener5v2 -en5esi -e3ne4ss -en1et -en4e2t2t4 -e2n3eu -e3n4ew -en3g2i -e4ng -en3ic -en1i -en5i1er1 -en2ie2 -en3i1g3r2 -en2i2g -en5in -enit5u -e4n3k -en1o -en3oi -en2o2m -en3oty -en2ot -enov3 -e4n2s -ens5a2l -en3sp -en4s4u2m -en3su -en4sus -ent3a2r -en4te2r1s2 -en1te -enter1 -en5t2i2a2 -ent2i -en4ti3fy -en1t2i1f2 -en2t2o -en4tri -en1tr2 -ent5rin -ent5up -en1tu -en4tus -4en2u -en3u1a2 -en3uf2 -en3u4r -en5ut -5enwa -en5w -eo3b -e4o2ch -e2oc -e4oda -eof2 -eo2l -eol5ar. -eol2a -eola2r -eol5at -eolo3g2i4 -eo1lo -eo1l2o1g -e5olu -e2o3m -eon4a -e3ont -e1o2p4t2 -e1or1 -eor4de -e2or3e -eor5o -eo1s2 -eo4t2o -e2ot -e1pa -ep4al -ep5ar1c2 -e1pa2r -epa4t -epend5en -ep2e4nd -epende2 -ep5ert2 -eper1 -e4pete -e3pet -epe5ti1t2io2 -epe2ti -e4p5ex -eph1 -eph4i -e2p2ig -e5pl2a -ep2l2 -ep3lic -epol3a -e1po -epol3i -epol2it5 -ep3re1h2 -e1pr2 -epr2e -ep3res5e -e4p5ri4m -e4p5rob5 -ept3or -e2pt2 -e1p4u -e3pu4r5 -e4puta -equin4 -equ4 -eq2ui2 -equ2i5no -er1 -era4cie2 -er2a1c2i2 -era4do -er2ad -era4g -era4l -er3aph -er2ap -er3ap1i -er3a2p4y -4era4ti. -4era4tim -er5a2tu -er3bat -er1b -er2ba -er3b2e -er2b5os -erbo2 -2er1c2 -er3ch -er3cl2 -2erd -er2d5a2r -er4di4e2 -2er2e -er3eal -erea4 -4e2r4ed -er3e2gr2 -er5el. -er5ell2 -er5e4ls -e4re1m2e4 -er3en -5er2e4nd -er4en4e2 -ere5o2l -e3re1q -er3er1 -ere4s -er5ese -er3esi -er5este2 -erest2 -er5e2sti -eres5t4r2 -eret4 -er3et. -er3e4t1s -er3e2t2t4 -ere4v -er3ex -ergi3v -er1g2 -er1g2i -er3gl2 -er3ia. -er2i1a2 -er4ia2n -eri4ci2d -eri1c2i2 -5er5ick1 -er2i2d -er3ie2 -er3i2ff -er2i1f -er4i1me2t -erim2e -er3in -eri4n1a -eri4on -er2i3o2 -er3io4u2 -er4i2s4c2 -er4i5sta -erist2 -4eri2t -e3r2i4v -er5iz -4er1j -er2k4 -er3m2e -er4m2oi -er1mo -5ernacl2 -er2n2 -er3na2c -ern1a -er5nal1is -er1n3er1 -er1n3is -ern1i -ern3it -4e4ro. -er3o2i4d3 -e4r2oi -er4o5is -ero5st2 -erpent5in -erpe2 -er3pent -erpent2i -erre5l2a -e4r1r4 -erre2l -err2e -er4rep -er5s2ine -e2r1s2 -er2sin -er5t4ed -ert2 -er4ter1 -ert5er. -ert5e2r1s2 -er4th2i -er2t5iz -2eru -eru4b -er2u5d -eru4n2d5 -erun1 -er4vi4l -er1v2 -5erw2au2 -er1w -eryth3 -e2r2y -ery2t -2er2z -4es. -es5a4m -es5a2n -e2sc2 -es5ca2n -es1ca -es5che2 -es2ch2 -esci5e2 -es1c2i2 -escut5 -e2s1cu -e3se2a2 -e3se2ct -e5see -e5seg5 -ese4l -es5enc -e3sh4a2 -es2h -e1shi -e5shu4 -esi4a2n -es2i1a2 -es5ic. -e5s2ick1 -es5id3en -es4i2d -esi4de -esi5d2i1u2 -es5ies -esie2 -es3im -e2s3in -e5sion -e1s2io4 -e4s1it -es4it. -es4i4t1s -e3sk1in -es2k2 -e3s4mi -e2s1m4 -e2s4od -es3ol3a -es2ol -es3ol3u -es3on1a -es2o3p -e1sor -es3per3 -es5pir1a -esp2ir -es5pit -es4p2l2 -es3plen5 -esple2 -es5p2ot -es2po -e5s2pr2 -es4s3a2n -e4ss -es1sa -essa2r5 -ess5ee -es4sil -es2so -es2t1a4b2 -est2 -est3a2n -e5sta2r -es5t2au2 -e2sti -est5ifi -es1t2i1f2 -est5igati -est2ig -estig1at -e3st2oc -es5t2oo -est4r2 -es4tud4 -es1tu -e1su -e2s3ul -es4u4r5 -et2a -et3al. 
-et5all1is -etall2 -etal1li -et3al5o4 -eta5m2e -et2a2m -et2a3p -et3ar1i -eta2r -et5a2r2y -et4as -et3ate -et3ati -et5ay -et3eer1 -etel1l5i -ete2l -e1tell2 -etend5er1 -et2e4nd -etende2 -et5en1i -eter2 -et3er3a -et5er3i1a2 -e1teri -e3tex4 -e2th1 -ethy2l3 -eth2y -2e1t2i2a2 -e3t2ic1u -et1ic -e3t4i4g2i -et2ig -e5tim -et3in -eti4n1a -e3t2ir -et5i2t3iv -e3t2i4u2 -et5o1lo -e5tomet1e -et2om -etom2e -eto1me2t -e2ton -et3on1a -etor3i -et1or -etra5g -etr2 -4e4tral -etr2a5m -et4ra2n -et5re4s4s -etr2e -et1ri -et4r2i1a2 -etrib5a -et2r2ib -e4trim -et1ro -e2t2t4 -et3ter1 -etud4 -et3ud4e -e4tum -et4we -et1w -e2t5z2 -eu3d2i3o5 -eue4 -euk5 -4eum -e3ur1g2 -eu4r -eur5i -eus4 -eu5ten -eu3ter1 -eut3i -ev4abi -e2v3ab -e1v2al5e -ev2a2p3 -ev3ast2 -ev3at -ev5eli -e2vel -eve4n -ev5erat -ever1 -ev5er3en -e4v4er2e4 -ever4er1 -e4veri -e4ves -e1v2i1a2 -e4vi1ab -e2vic2 -evic1tu4 -e5vi2ct2 -evi2d3 -ev5ig -ev4ile -evi4l -ev5is2h -evi2s5in -evis5o -e4v2i1u2 -ev2oc3 -e1vo -evol5e -evo2l -evol5ute -evu4 -e1wa -e4wag -e5w4ay -ew1er1 -e3wh2 -ew5ie2 -ewi2 -ew1in -ew5is2h -e3wit -e1wr -ex5i4c -ex1i -ex4on. -ex1o -exo4n -1ex3p -4ey. -ey4as -ey1a2 -eyl4 -e4y3s2 -ez5er. -ezer1 -ez5e2r1s2 -ez5ie2 -ez1i -1f2a -2fa. -fab4i -f1ab -fa3ce2t -fa2ct2 -fa2c3u -2f3ag -fal2l5in -fall2 -fal1li -5falo -fa5lon -fals5ifie2 -fa4ls -fals2i1f4 -4fan3a -fa2n -fan5tas1iz -fan2t2a -fantas3i -fant3i -5fa2r -far3i -5faw -4f5b -2f5d -2fe. -3feas -fea2 -fe4a3tu -fe2b5r4 -fe1b -3fec -2f2ed1 -5fei -fe1li -fem3i -femin5 -fend5er1 -f2e4nd -fende2 -f5en1i -4f4e2r4ed -fer1 -f2er2e -fer3ee -3fero -fe5r2oc -fer5om -3fe4r1r4 -fer3v2 -2f4es. -fes2s3o -fe4ss -fest3a -fest2 -fe2st5i -fe4t -fet4al -fet2a -fet4in -fet4o -3feu -fe5veri -fever1 -2ff -f1fe -ffec4te -f3fec -ffe2ct -f5fe4t -f1fi -f5f2i1a2 -f3fic -f5fie2 -f1fi2l3 -f2f3is -ff4le -ffl2 -ff3lin4 -f3f2oc3 -f1fo -ffon1i4 -ffo2n -ff2or3e -f3fr2 -ffran2ch5 -ffra2n -4f5h -fi5ance -f2i1a2 -fia2n -f4ib5u -4fic. -4fi4c1al -3fi1c2i2 -4fi4c5s4 -fi5del -fi2d -fid3en -fiel4 -fie2 -fi2er4c2 -fi1er1 -figh2t5 -f2ig -fi2gh -1fi2l -2fin -fin2a -fi3na4l -f2i4nd3 -fin2e -f1i4ng -5finin -f2in1i -fin4n1i -f2in1n4 -fir2m1 -f2ir -f3it1a -f5it3ee -fl2 -3fl2a -fle2s -f3lica -flin4 -3flo -flo5ric -flor1i -3flu -flu1m4i -1fo -4fo. -3f2oc -fo2e -foet3i4 -fo1et -fo1l4i -fo4li2e2 -fomen4t4 -f2om -fom2e -fo1men -fo2n -fon4de2 -fo4nd -3f2oo -fo5r2a2m4 -for1a -for5ay -for5b -for4di -fore3t -f2or2e -5f2orm -for4m3a -fortu5n4a4 -f2ort2 -for1tu -for3tun1 -fo3v -1fr2 -frag5a -fran2t4 -fra2n -fra2r4 -frat2ch4 -fra2tc2 -fre4s -fr2e -fros4t5i -frost2 -fr4uc4 -2f3s -fs4p -2ft -f1t4ed -f4ter. -fter1 -f2t5es -ft2i4et -ftie2 -ft4ine -ft1in -3fu -4fu. -f4u4c -fuel5li -fuell2 -fug4a -fu4min -fu1mi -fu4n2g -fun1 -4fu2r4ed -fu4r -fur2e -fur3n2 -fu3sil5 -fus5o -fu5til -fut2i -4ga. -ga4cie2 -g2a1c2i2 -gadi4 -ga4d4os -ga2do -3gag -3g2ai2 -3g2ale -ga5len -gal2i4a2 -gal5ler1 -gall2 -3galo -gam4bl2 -g2a2m -ga2m1b -gan5at -ga2n -gan2a -4ga2n2ed -gang5er1 -ga4ng -g5ant. -gan4t4r2 -g5an4t1s -g5ar1c2 -ga2r -g4ar2e -gar3ee -gariz4a1 -gar1i -gar1iz -ga5r2ot -gar5p -5g2a4r1r4 -1ga4s -gas5i -g2as3o -gas2ol5 -gas2s5in -ga4ss2 -gast3r2 -gast2 -g1at -g4at. -ga2t5iv -g4a2to. -ga2to -g4atos4 -g4a2t1t4 -g2a2t5u -gaud5 -g2au2 -ga5z1a1 -g2az2 -g1b -g5d4 -2ge. -5geal -gea2 -3gea2n -2g2e4d -3gedi -5ge4d1n2 -4ge1f -1gel -4g2ele -ge4li -gel4in -gel5li -gell2 -ge4lu -2ge2ly -gem3i -5ge1mo -3gen -gen4du -g2e4nd -gen5it -gen1i -gen3o -gen5t2i -ge4o -geo3lo -geo2l -4g2er2e -ger1 -3germ4 -2g4es. 
-5ge4ss -gest5at -gest2 -3get -get3a -2g1f -2g1g -gg4a -g2ge -g5ge2dl4 -g2g2e4d -g3ger1 -g5ger3er1 -g4g2er2e -gg2i4a5 -g1g2i -g3gli -g2gl2 -g3glu3 -g5g2ly -ggr2av3 -g1gr2 -g4g4ro -2gh -g5h2ai2 -gha2 -gh5en1i -ghe2 -g3ho -g4hos -gh2t -1g2i -4gi. -gi4all2 -g2i1a2 -gi4at -3gib -g2i5c2o2 -g2i4g -gi5ga2n -1g4in5g2i -gi4ng -3g2io2 -gi4or -gi4ot -5g2ip -gi5pa -g4i4s -5gis. -gi2t1 -5gitu -giv5en. -2gl2 -g3la2r -gl2a -5glass. -gl4as -gla4ss2 -glec4 -3g2ler1 -g4leto -g1let -g4letr2 -g4ley -gli5on -gl2io2 -g5l1is4 -3glo -4g5lod -gl2om3 -4glop -3glu -glu5te -glu5t2i -3gl2yp2 -g2ly -2g1m4 -2gn2 -g1n1a -g4n1ab -g5nate -5gn4a2th -g5nati4 -gna5tu4r -gn2a2tu -gn5e2dl4 -g2n2ed -gn5ee -gn3er1 -g1n1i -g4n2i1a2 -g2n3in -gn4in. -g4n2i2o2 -g2no -5gnor1i -gno4s -2go. -5g2oa2 -3g2oc -5god -3goe -go4et -go4ge -g2o1g -4go3gr2a2m -go1gr2 -g5o2i4d -g2oi -g4o3is -go2m2e -g2om -5gon1n4 -go5n2om -3g2oo -goph4 -4gor. -5gor1g2 -4go2r1s2 -g4o2r2y -3gos -gos4t2 -2go4u2 -gour4i -gou4r -g1ous -gov1 -g3p -1gr2 -gr1ab4 -3gr2a2m -4gram2e -gr2a2p -g4r2e -gril4 -gri2m3a -g4ro -gr2o4g -g5ron -gr2op4 -3gru -gru3en -gr2u5i2 -gru2m4b -2g1s -gs4c2 -gs4t2 -g4sti -gth5en1i -gthe2 -g5to -g4u2a2 -gu5ab -5gua2n -3guar2d -gua2r -g5uat -2gue -5gueu -5guit4 -g2ui2 -gui5t1a -gu2ma -gu4mi -3gun1 -g4uras5 -gu4r -gur1a -g4u2r4ed -gur2e -gur4n2 -gur4u -4gu2r2y -gust5a -g4ust2 -2g1w -2gy -g4y2b -5gym -3gyn1 -gyn5o -g5z2 -ha2 -4ha. -h4ac -hadi4e2 -had4in4e -ha2d1in -hae3o -ha2g2e4d5 -ha3g2i3o2 -ha1g2i -hag5u -ha5ic -h2ai2 -hais4 -hak4ine -h2ak -hak1in -hal5ant -hal2a -hala2n -h2a4m -ha1m5a2n -han4cro -ha2n -han1cr2 -ha4n2g -h1an1i4 -h5an1iz -han4t -han2t3a -ha4pe -h2ap -hap3l2 -har1a -ha2r -har5b -har4d -har5die2 -har2ge4 -har1g2 -ha5ri2s1m4 -har1i -har3o -har4t4ed -hart2 -har4ti -has4te2 -hast2 -ha2t5o -haugh2t5 -h2au2 -hau2gh -ha2vel4 -h2av -hav5ersi -haver1 -have2r1s2 -ha1v5o -h1b -h1c -h1d -hdeac5 -hdea2 -h1du4 -he2 -4he. -h2ea2 -1head -3hea2r -hear2ch4 -hear1c2 -heas4t5 -heav5en -he2av -he2c3t4 -he5del -h2ed -hede2 -he3do -heek4 -h4ei -h4e3is -he5lat -hel2a -h5elin -he3l2io2 -he5l2i1u2 -hel4li -hell2 -h3el3o -hem1a -he3men -he1m2e4 -hemis4 -he5m4op -he1mo -hem4p -hende5 -h2e4nd -he3or1 -hep1 -h1er. -her1 -her4as2 -her2b -her2b3a -herb3i -here3a4 -h2er2e -here3o -h5er3e2t2t4 -heret4 -h5erh2 -her5ial -her2i1a2 -h5erin4e -her3in -h1erl -her5om -h4eron -h1e2r1s2 -h5erwa -her1w -hes3t4r2 -hest2 -het1 -h4et3a -het3i -het4t4ed -he2t2t4 -heu2 -h4eum3 -heumat5 -heu1ma -he4v4 -hev5i -hex5o -h1f -h5h -2hi. -hi4a2r -h2i1a2 -h1ic -hi3c4a2n -h4i4cin -hi1c2i2 -h4icl2 -h5ie. -hie2 -h1i1er1 -h4i4e2r1s2 -h1ies -h3ifi4 -h2i1f -h3i3fy -hig4o -h2ig -hi5ka -h2ik2 -hi4l -hi5ma4 -hi5mer1 -him2e -himos4 -hi2mo -h1in -h2i4n4d -h2in2e -hi5n2ie2 -h2in1i -h5in1iz -hi5nop -h2ino -h2i4n1s2 -hio5lo -h2io2 -h4i1ol -h4i1or -h2i2p -hip3l2 -h4ir -hi4r4r4 -hir3r5i -hit4a -h2iv5a -4hl -h3l2a -h1le -h3let -h1l2i -hl2i4a2 -2h1m -h4man3ic4 -h1ma2n -hman1i -h5mica -hm1ic -2h1n2 -hno1cen5 -hn2oc -hn4o3ce2 -4ho. -ho3a2n -h2oa2 -ho4c2o2 -h2oc -ho3don -ho2do -ho5du -ho5ep5 -hol3a2r -hol2a -hold1 -hol4is. -hol1is4 -ho5l4y2s -ho2ly -ho4mag -h2om -ho1ma -hom5in -hom1i -h2o4n -hon5em -ho2ne -ho5neu -hon3ey -hon2g3i -ho4ng -ho5n2io2 -hon1i -hon1o -1hood -h2oo -hoo5r -h4ope -ho2p5r2 -h4op4te -h1o2p1t2 -hor5et -h2or2e -h4or2n2 -horn5i -ho5r2o1g -hort5h -h2ort2 -hosi4 -ho4ton -h2ot -ho1t2o -h2o4u2 -3h2ouse3 -4h1p -2hr -hras5eo -hras2 -hr2as2e -hr2e4 -hre5ma -hr5er1 -hres4 -hri4 -hril2l5in -hrill2 -hril1li -hrim4 -h5rit -h3r2od -hrom4i -hr2om -h2r2y4 -h3rym3 -2h1s -hsi4 -h4s2k2 -ht5ag -ht5ee -ht3en. 
-ht5e2n1er1 -h4t4ene -ht3en1i -ht3e4n2s -ht5e2o -h2t5es -ht4f2oo -h2t3f -ht1fo -h1th -ht4ine -ht1in -hu4g -hu4mat -hu1ma -hu5mer1 -hum2e -hu4min -hu1mi -hun4c -hun1 -hu4n2k4 -hun4t -hur3i -hu4r -hu3s2i1a2 -huz4 -h1w -h4wart2 -hwa2r -h2y -hy2l -hyl5en -hyle2 -hy2m -hyn4 -hy3o2 -hyol5i -hy1pe -h2yp -hy3ph -hyr4 -hys3te2 -h4y2s -hys1t2 -hy4t -2i1a2 -ia4bl2 -i1ab -iab5ol1is4 -iabo2 -iabol3i -iab5ol1iz -i2a2ch -ia1c3o2 -i2ac2r2 -ia5cri -ia5d4em -i5ae -iaf4 -i2ag4 -i4a3g1n2 -i5a4g5o -ia3gr2 -i3ah -i5ai2 -iale2ct4 -i2ale -ia3lec -i3al1it -ial5li -iall2 -4ial1n4 -i2a3lo -i2a5ly4 -i5a2m1b -i2a2m -ia3m2e -ian2ch5 -ia2n -i3ant -i5ape -i2ap -ia3ph -i2ard -ia2r -4iarit -iar1i -i3at -ia5the2 -i4a2th -i5at2om -ia2to -i2a2t4u -iat3ur4a -iatu4r -i3au2 -i2av4 -ib3era -ib2e -iber1 -ib1i -i1b2i2o4 -ibios4 -ib5li -i2bl2 -4ibo2 -i4bon -ibor4 -i4bose -i5bo4u2 -ib1ri -ibr4 -4ibu -ib3uta -ic3ac -ic5a2do -i4c1al -ic1a2n -2i1ca2r -iccu4 -ic1c4 -4ice -i5ceo -4i2ch -ich4i -ich5i4ng -ich1in -ich5ol -i1cho -4icin -i1c2i2 -i5c2io2 -2ick1 -ic4lo -icl2 -2i2c2o2 -i3c2o3c -ic5ol3a -icon3o -icon1 -i5cop -icoty3le5 -i5coty -ic2ot -2i1cr2 -i4cri -i4cru -i4c2r2y2 -ic4te2dl4 -i2ct -ic1te -ict4ed -ic4ter1 -ict5ic -2icu -icu4lu -ic3um -i5cun4 -i5cut -2i1cy -i2d -id1a -i5d4ay -i1d4e4m -id3enc -id3era -ider1 -i3derm5 -i3d2icu -id3i1f -i5d2ig -i5dil4 -i3dim -id4ine4s -i2d1in -idin4e -idios4 -i3d2i1o2 -id2ir4 -id1i4s4 -id4ist2 -2i4d1it -i1di4v2 -id3li -i2dl4 -id3ol -i1do -idol3a -4idom1i -id2om -id3ow -4idr2 -id5ri -id3ul -i1du -ie2 -4iec -2ieg2 -ie3ga -ie5i -i5ell2 -4iem -2i1en -i2e4n2d -i1er1 -i3ere4s -i2er2e -i2eri -ier3i4n -4ier2n2 -ier2o -i4ert2 -i3e2sc2 -ies3e4l -i1es2t2 -i3e4st. -2i1et -i4et. -ie2t3ie2 -4ieu -i5eut3i -iev3a -iev3er1 -ie1v3o -2i1f -i2fe -if4f2a -i2ff -iff5ler1 -iff4le -iffl2 -i4f3ic. -i4fic3ac -i4f5i4c5s4 -ifi4d -i2fi4n -4i2fl2 -i3fo -i3f2oc5 -if5tee -i2ft -i3fy -2ig -i3gad -ig3a4nd -iga2n -3iga2r -i1ge -i3ger1 -ight5er. -i2gh -igh2t -ighter1 -ight5e2r1s2 -4i1g2i -ign5iz -i2gn2 -ig1n1i -ign2o5m -ig2no -i3gon -ig1or -ig3ot -i5gret -i1gr2 -ig4r2e -i4g5ro -i5gu5it4 -ig2ui2 -ig1u4r -2i1h -ih2y4 -2ii -i5in -i1ja4 -4iju -2ik2 -ik5a2n -ike4b -i2l3a -ila4g -ila5te2l -ila4te -i5l4ater1 -il4a4x -il5dr2 -il4du -i3len -ilesi4 -il3f -il3ia. -il2i1a2 -il3ia2r -ili4arl -i3li1c2i2 -i5l2i1en -ilie2 -ili4er1 -il4i4fe -il2i1f -il4ific -il1in -il5i2ne. -il4ine -4ili3o4u2 -il2io2 -il5i4p1p2 -il2ip -il5i1q -il4ite -il1it -ilit5u -il4mo -ilm2 -i5lon -il3o4u2 -ilth4 -il2tr2 -4ilu -il5ul -i5lum -il5ur2e -ilu4r -il3v -4ilym2 -i2ly -ima4c -im2ag -im3a4ge -im1al -i2m5a2m -i5m2as -i4mat4ed -i4ma2t3in -im2a2t5u -im1i -i3m2ie2 -im4ine -im5ino -im5me2s -i2m1m2 -imm2e -i2mo -i5m2o1g -i3m4on -im5oo -i3mos. -impar5a -im1p -im3pa -im1pa2r -impar2ad5 -im5p2ie2 -im2pi -imp2o2t5 -im3po -im5pr2 -im3pu4 -im1ul -im5um -in3ab -in1a -4inace -ina2c -in4a2do -in5a2gl2 -in3a2ir -in2ai2 -ina4l -4inal1it -i1n5a2m -in3a2n -in3ap -in4a2r2s2 -ina2r -i3nas. -in2as -4in2a2ta -inat1or5 -ina2to -in3au2 -in4aw -2inc -inc4t2u1a2 -in2c1t -inc1tu -2i4nd -in5da2r -in2da -inde5p -inde2 -inde4s5 -in1de3t -indeterm5 -indeter2 -in5dro -indr2 -4inea2 -4i2n2ed -in5ee -in5eg3a -4in5eo -ine4s -in3esi -ine5te -4ineu -inev5 -infilt5 -in3f -in1fi2l -infol4 -in1fo4 -4in3fu -4ing2a -i4ng -in5gal -4inge -ing5ha2 -in2gh4 -4in2g2i -4ingle -in2gl2 -4ingli -4in1go -4in1gu -in2g3um -2in1i -in5ia. -in2i1a2 -4inic -in4i1c2i2 -in3ion -in2io2 -in4itud -4i4n2k -ink4ine -ink1in -4i4n1l2 -2in1n4 -2ino -4i4no. -in3oi -i5nole -4inos -i3n4os. 
-in5ose -in3osi -4in1q -i4n1s2 -in4s2ch5 -ins2c2 -inse2 -inse2ct5 -in5sec -insec5u -in3si -5ins2k2 -insolv5 -ins2ol -in4tee -in1te -int5e4ss -in2t2es -in3til -int2i -int5res -in1tr2 -intr2e -intu5m -in1tu -2in2u -in5ul -in5um -in3un1 -in3u4r -invol5u -in3v2 -in1vo -invo2l -2io2 -io3a2ct4 -i2oa2 -i1od -iod3i4 -io2d5o -ioe4 -io3gr2 -i2o1g -4i1ol -io3ma -i2om -i4oman1i -io1ma2n -io3mo -i5ope -io3ph -i5o1po -io2p4s2 -i1or -ior2a4m4 -ior1a -4i2or2e -4iorit -ior1i -5ior1iz -4iorl -ior4n2 -io3sc2 -i3ose -i3osi -i4oso -i4o5sta -iost2 -i3ot -iot4a -i4o5th -iot5ic -i4o5tr2 -i4oty -i4our. -io4u2 -iou4r -i4ou2r2s2 -i5o4x -2ip -ip3al -ip2ap4 -ipar3o -i1pa2r -ipart5ite -ipart2 -ip1at -i3p2e4nd -i1ph2e4 -iphen3 -i5pheri -ipher1 -iphi4 -i4phu -ip3i2d -i5p2il -ip3in -ip4in4e -ip2ir4 -ip5is -ip1i4t -ip4iti -ip3lin -ip2l2 -ip3lo -i3po -i4p2o1g -i4poli -i4p2om -ip4o2n3 -i4pow -ip2p2l2 -i4p1p2 -ip3pli -ip4r2e -i1pr2 -ip5tor1i -i2pt2 -ipt1or -ip1ul -i5put -i2p4y4 -2iq -i3q2ua2 -iqu4 -2ir -ir1a -ir4abi -ir1ab -ira4c -ir4ae. -ir4ag -ir4al1in -ir4al1li -irall2 -i5r2a3so -iras2 -irassi4 -ira4ss2 -ir4ay4 -ird3i -ire3a4 -ir2e -ir3ec -ir5ee -irel4 -ire5li -ires4 -ir5e4ss -ir1i -ir2i4d -ir4im -ir4is. -5ir1iz -irl5i4ng -ir5o2ch -ir2oc -ir5ol -ir3om -ir4q -i2r2s2 -ir5ta -irt2 -ir5tee -irwo4m2e -ir1w -ir1wo -ir3w2om -i4sa -is5ad -is3age -isa4g -is1a2l -is3a4m -is1a2n -is3a2r -is5av -4i4s3b -i2s3c2 -is5chi -is2ch2 -isci5c -is1c2i2 -4i1sec -ise5cr2 -is3ell2 -4is3en -is2er1 -is5er2e -i2s3et -4iseu -is3ha2r -is2h -isha2 -ish5ee -ishe2 -4ish3io2 -ish3op -is5hor -2is2i1a2 -is5ic -is3ie2 -4isim -is3inc -i2sin -4is1is2 -is4ke2 -is2k2 -i2s1l4 -islun4 -2isma -i2s1m4 -is1on -is5on1er1 -is4o2ne -is2o5p -is1p -i3s2ph2 -5is1pr2 -2i4s1s -iss5ad -is1sa -is4sa2l -is5sa2n -is4s4iv -is1s4o -4ista -ist2 -is4tal -ist5enc -iste2 -ist5ent -is5ter3er1 -i4s2t2er2e4 -ister1 -4is1th -is4t3ic -4i4s2tl -i4s1to -4is4t2om -is1tr2 -3is2t4r2y -4is4ty -i5s2ul -is3u4r -2is2y -it1a -i2t5ab -ita4c -4i1t2ai2 -it3a2m -it4an2a -ita2n -it4as -it3at -i3te2ct -it3ee -it3enc -it3ent -it3era -iter1 -2ith -itha5l -itha4 -ith5i -i5thol -i1t2h3r -ith2y5 -2i1t2i2a2 -it2i4c2o2 -it1ic -it5ic1u -it1ie2 -it3ig -4i1tim -it4in. -it1in -i4t4i4n1s2 -4itio. -i1t2io2 -4itio2ne -i5t2i4q -4i5tit -i2t3iv -it4li -i2tl -it5lo -4i2to. -it5ol -2iton -it1o4u2 -2itr2 -it5re4s4s -itr2e -i4tric -2i2t1t4 -it4tit -it3ti -itu4a4ls -it2u1a2 -itu1al -it5ua2r -4itue -it1ul -it1u4r -it3us -2i1u2 -i3um -iur5e -iu4r -2iva -iv5anc -iva2n -iv1at -i4v2ed -iv5el. -i2vel -iv5el3i4ng -iv5e4ls -i4ver. -iver1 -iv3eri -i4vers. -ive2r1s2 -iver5sa2l -ives4 -iv3et -i4vie2 -iv3i1f -i5vil1it -ivi4l -ivil3i -5ivi4st. -i2v5ist2 -5ivi2s4t3s -iv1i2t -i2vo -iv2oc3 -i5v2or2e -2i1w -2ix -ix3o -i5ye -1iz -4iz2ah -iz1a1 -iz3i2 -2izo -iz5oi -2izz2 -1ja -2ja. -3jac -ja2c5o2 -jac3u -jag5u -jal4 -ja5lo -ja5pa2n -j2ap -j4apa -jel5l2a -jell2 -jeo2 -jeop3 -4jes -jeu4 -jew3 -2ji -3j2ig -jil4 -jill5 -5jis. -3jo2 -4jo. -jo1c5o2 -j2oc -joc5u -jol4e -4jr -4js -ju1di -j2ui4 -ju5l -ju3n2i -jun1 -ju2s1cu4 -j2usc2 -jut3a -ju1v2 -k4abi -k1ab -k2a5bu -ka2ch4 -k3a4g -kais5 -k2ai2 -ka4l -ka5lim -kal4is -k4a2n -k2a3o -k2ap4 -kar4i -ka2r -1kas. -kau4r4 -k2au2 -k2av4 -k1b4 -k1c -kc2om4 -k1c2o2 -k5d2 -k1do4 -kdol5 -4k2ed -ke5da -k5ede2 -3kee -ke4g -k2e4n4d -ken1o4 -ke2p5t2 -ker5a -ker1 -k4er2e -k5erel -k4er4j -ker5o -kes4i -ket5a -key4wo -key3w -k1f -kfu4r4 -k3fu -k3ho -5k2i1h -ki2l -kilo3 -k1in -k2in. -3k2i4nd -kinema4 -kin5et -k3i4ng -k2in4i -k2i4n1s2 -kir3m -k2ir -ki4r4r4 -kis4 -3kis. 
-k1is2h -ki2t5c2 -k2i4w -kk4 -k5ker1 -k2l2 -k3l2a -k5lea2 -k3ler1 -k3let -k3li -k3lo -k1m -kn2 -k2no -1kn4ow -k2o5a2 -kol4 -ko5m1i -k2om -ko5pe -k1p -k5ro4 -k3ru -4k1s -k3sl4 -ks2mi -k2s1m4 -ks4t2 -k1t -ku4r5 -k5v -k1w -3kyl -l2a -4la. -5la4a -lab5a2r -l1ab -l2aba -la1bel4 -l2ab2e4 -5lab1r4 -l4ac -la2c2a -la5ceo -la5cer1 -la4ch -la2c2o2 -5la5col -lac5on1 -la3cu -la4de -l5a2d1m -l4ae -l4af -la3ger1 -la4g4i4s -la1g2i -la2g3r2 -5l2ah4 -la4ic. -l2ai2 -la1ic -l4al -4l2ale -5laman3dr2 -l2a2m -lama4n5d -la1ma2n -la5mel1li -lam2e -lamell2 -lam4ie2 -lam1i -la1m1o -l5amu -lan3at -la2n -lan2a -la4n2d -3land. -land3i -3lan4d1s2 -lan4er1 -lan3et -lan5tine -lant2i -lant1in -lan4t4r2 -l2a4p -lap1i4 -lar5a2n -la2r -lar5de -4la2r4ed -lar2e -l4as -lat5al -l2a2ta -la4te -5latil1is -la4t1i4l -5latil1iz -5lat2in1i -la2t3in -lat5us -l2a2tu -l4au2 -5lau4r -lav5at -l2av -l4aw -4l2az2 -l3b -lb2e4 -l4bit -l4by -l1c2 -l2c1at -lce4 -l1cen4 -l4c2er2e -lcer1 -lch4e2 -l2ch -l3da2r -l3d2ed -l3de1h2 -l5dera -lder1 -ld3est2 -l5dew -ldi2 -l3die2 -ld4in4e -l2d1in -l5di5ne4s -ld3is2h -ldi4s1 -ld5li -l2dl4 -l3do -4le. -3leagu -lea2 -lea4g -le5a1t2io2 -leav5er1 -le2av -l3eb5ra -le1b -le2br4 -le3c2a -le5cha2 -le2ch -lect5ica -lect2i4c -le2ct -2l2ed -le5dr2 -leg1a -l3ega2n -3le2g1g -le4gin -le1g2i -leg3o -le3gra -le2gr2 -lek4 -4l4e4l2ed -l2ele -lel5o -le1lu5 -lem5enc -le1m2e4 -le1men -lem3is -l5em1iz -5le2m1m2 -l3e4m1n2 -le2mo -l4em5on -l5en2da -l2e4nd -len5da2r -lend4e2 -len4do -l4e1ne -le5n2ie2 -len1i -len3o -4len1t2io2 -lent2i -l4en5u -le3on -leo4s2 -le5q -2ler1 -le5rec -l2er2e -5l4er2i1a2 -l4eric -le5r2ig -ler3om -leros4 -ler3ot -4l4es. -le3s2c2o2 -le2sc2 -3le4s4s -1let -le5tra -letr2 -le5tr2e -5le5tu5 -leu4r5 -2lev -l3eva -5leve -lev5it2a -levi2t -le4wi2 -l5ex1a -1ley -lf5i2d -l2fo -lf3o2n -l1g2 -l4gal -l4gem -lg2i4a2 -l1g2i -l4gi2d -l4g2oi -l3h -4li. -li4an1i -l2i1a2 -lia2n -lias4 -lib1r4 -l1ic. -5l4i2ch -li4cie2 -li1c2i2 -5li5c2io2 -l3ic3on1 -l2i2c2o2 -lict4o -li2ct -l2i4cu -l3id1a -li2d -l4ida2r -5lid3i1f -3l4ieu -lie2 -l4i2fe -l2i1f -l4i3fo -lift5er1 -li2ft -1l2ig -li5ger1 -li1ge -light5i -li2gh -ligh2t -5l2i1h -3l2ik2 -1l4il -lil4i -li2m2b -limet4e -lim2e -li1me2t -lim4p -l4i2na. -lin1a -l4in2as -l2i4n4d -l4ine -5l4in3ea2 -lin4er. -lin1er1 -lin4e2r1s2 -lin4ger1 -l4inge -li4ng -l4in2g3i -5lingt -3l4in1gu -3l4in1q -lint5i -3li2o1g -l2io2 -l4i4ol -li2o3m -li3ot4 -li3o4u2 -5liph -l2ip -li2pt5 -l2i1q -3l2ir -l1is -l4is2k2 -5lisse -l2i4s1s -l1it -l2it. -l3it5a -5liter1 -3l2ith -5l2i1t2i2a2 -3l2itr2 -lit4u -l4iv -l5iv1at -l2iva -liv3er1 -liv5i2d -lkal5o -lka4l -lk5at -lk3er. -lker1 -lk3e2r1s2 -ll2 -l1l2a -ll2a4ba -ll1ab -lla2ct4 -ll4ac -l5l4as -l4l4aw -l5le1b -l1lec -l1leg -l3lei -l1lel -lle5m -l1len -l3lep -l3leu -l3lev -ll3f -l1li -lli5a2m -ll2i1a2 -lli4a2n -llib4e -llic4 -l4licl2 -ll2i5c2o2 -l5lie2 -llig1at4 -l1l2ig -l2lin -l5lin. -l3lin1a -l3l4ine -l5l2io2 -ll4i5v -ll3m2 -l1lo -lloc3a -l1l2oc -lloc5u -l1lo2q -l4lov -llow5er1 -ll3p -l4l3s -ll5t -l1lu -llun4 -l5ly1a2 -l2ly -l3ly1c -l3ly1g -l3ly1h -l3ly1i -l5lym2 -lm2 -l1ma -l1m2e -l4mer1 -lm3i4ng -l5m2i3p -l2m3od1 -l1mo -l1n4 -l3ne -lneo4 -2lo. -5load -l2oa2 -5l4ob3a -1l2oc -loc3al -loc2a -loc5ul -lo4cus. -lo1cus -2lo1cy -l3od1i4s2 -3lo3dr2 -1l2o1g -lo5ga2n -lo2ga -4loi. -l2oi -lo5m1i -l2om -lo2m4m2 -lon4al -lon1a -lo2n4e -l5onel -lo5ney -long5in -lo4ng -lon2g2i -3l4o1n2i1a2 -lon1i -lon2i4e2 -l3onis -l3on1iz -loom5er1 -l2oo -lo2o4m -loom2e -lop4e -5lo5pen -l3o2p1m -1lo1q -l4o2r4ed -l2or2e -lor5i3at -lor1i -lor2i1a2 -lor4i2fe -lor3i1f -lo5rof -loros4 -l4os. 
-lo1so -lo4ss4 -los5sie2 -lot5at -l2ot -loth4ie2 -l4oth -lo5tu -5lo2up -lo4u2 -lp1at -lp3er1 -lph2 -l5phe4 -l3ph1i4n -l2pho -l3p2ie2 -l3pit -lr4 -l3ri -l3ro -l5ru -4ls -l5s2a4m -ls5a2n -lsi4f2i1a2 -ls2i1f4 -lsi4m -ls4is2 -l5s2k2 -ls4p -l1s2t2 -lt4an3e -lta2n -l4ta4ng -lt5ant -l5ta2r -l1te -l4tei4 -lter2n3 -lter1 -lth3i -lt2i4c2i2 -lt1ic -ltim4a -l1tim -lt1in4 -lti3t -l3t4iv -lt4or -l1tr2 -ltram2ont5 -ltr2a2m -ltra3m4on -ltra1mo -l1tu -l4tus -4lu. -l2u1a2 -lu4ch4 -l4uc -lu2c5o2 -luc5ra -lucr2 -lu4cu -4lue -lu1en -lu5er1 -l2u1i2 -lu4it -lum4bri -lu2m1b -lumbr4 -lu4mo -5lum2p -lu2m5u -lunch5eo -lun1 -lun2ch -lunche2 -5lune -l3unta -lu3or1i -5l2up -3lur3o -lu4r -lus2k5 -lu4ss4 -lut5a2n -4lut5a2r -5lution1iz -lut2i -lu3t2io2 -lution1i -lu5t2oc -lut2o -lut5r2 -lu1v2 -lv5ate -l5vet4 -l4vi -l4vor -l1vo -l3w -lx4 -2ly -4ly. -ly1c -ly4ca -lyc4l2 -ly2c5os -ly1c2o2 -lym2 -lymph5 -lym1p -l2yp2 -ly4pa -lypt5o -ly2pt2 -3lyr -lys5er1 -l4y2s -3ly3w -3lyz -lz4 -4ma. -m4ac2a -mac3ad -ma5ch2in2e -ma2ch -mach1in -5mach2y -ma4cis -m2a1c2i2 -ma2ct4 -4mad. -4mada -4ma4d1s2 -ma4ge -5m4a2g1n2 -2mago4 -2m2ah -ma5ho -3m2a4i2 -4m4ai. -mai2d3 -5m2ak -mal3a4p -mal2a -mal5ar1i -mala2r -5m2ale2 -ma2l5ed -mal3e1f -m3al1g2 -m3al1is -m4al4is. -mal3le -mall2 -mal4li -2m2a2m -mament4 -ma3men -mam2e -m5ament. -1ma2n -3m4an. -man3a -man5da2r -ma4nd -man2da -man3dr2 -man3ic4 -man1i -man4ica -ma5n2il -m4a4n2s -man3te2l5 -man2te -2m2ap -m3aph -1ma2r -5mara2n -mar5ol -ma5ro4n -ma3r2oo4 -mar5ri -m2a4r1r4 -mar4shi -ma2r2s2 -mars2h -mar3v2 -ma3son -m2aso -massi4 -ma4ss2 -mass5i4ng -mas2s1in -3mas1t2 -ma4s4t4ed -maste2 -mast4ic -mas4t1in -m4at. -m4aten -m4a3ter1 -mater5n4 -m4at1it -mat4iti -m4atiz1a1 -mat1iz -m4a3t2o1g -ma2to -mat5om -ma3top -m4a4t1s -3m4a2t1t4 -ma5tur2e -m2a2tu -matu4r -m2av4 -2m1b -mba2t4t4 -m2b4d -m5bec -mb2e -m5ber3er1 -m2b2er2e -mber1 -m4be2r2y -m4be1s2 -mb2i -m2bic -m5b1il5 -m4b3i4ng -m4b2is -mb5ist2 -mbival5 -mb2iva -m5b2ler1 -m2bl2 -m3bli -mbru4 -mbr4 -mbu3l -mbu4r4 -m1c -m5d -m2e -2me. -mea5g -mea2 -me5a4nd -mea2n -me4ba -me1b -me4b2i -2m2ed -4med. -3me1d2i1a2 -med5ic1at -4medie2 -m5ed5ies -3med1it -me4do -m5e2d2y -me2g -5meg2a1 -meg1a5t -4m2ele -mel5ee -mel5ler1 -mell2 -mel3on -mel4t -melt5er1 -mel1te -me2m -4m5e1m2e4 -1men -3men. -2men1a -men4ag -mend5er1 -m2e4nd -mende2 -men1d5o -m4e1ne -ment5or -men2t2o -5men4t1s -5me2o1g -me4p -m5era2n -mer1 -4m2er2e -mer4i1a2 -2me2s -mes5en -me5s2i4a2 -mes5q -3me2sti4 -mest2 -1me2t -meta3t -met2a -met1e -4met4ed -meth4i -me2th1 -met1i4c -met5i1c2i2 -met3o -met3ri -metr2 -m1f -4m3h -4mi. -m1ic -m4i4cin -mi1c2i2 -m2i3c2o2 -3micro -m2i1cr2 -m4i2ct -mi3cul -m2icu -mi4cus -m4idi -mi2d -mi2d4in -mid5on -mi1do -mi5fi -m2i1f -mig5a -m2ig -migh5ti -mi2gh -migh2t -mi2gr2 -4mij -mi5ka -m2ik2 -m2il -m3i2l3a -mil4ad -4m5ilie2 -mil5ies -3mill2 -mi5lo -mil4t -3m2im -mim5i -5m2i4n4d -mind5er1 -minde2 -min4er. -min1er1 -min4e2r1s2 -m4ing5li -mi4ng -min2gl2 -min5ie2 -m2in1i -m4init -min3ol -m2ino -1m4int -minth5o -minth2 -m2i3o2 -m2i3p -mir1ab4 -m2ir -mir1a -mi5r2acu -mira4c -m2is. -m4i2s3c2 -mi4se -4misem -mis3ha2 -mis2h -5missi -m2i4s1s -m3i4st. -mist2 -mis4t1in -m3i2s4t3s -mi2t -m5ita2n -mit1a -4mity -3m2i1u2 -5m2ix -4m1l -mlo5cut2i -m1l2oc -mlun4 -2m1m2 -mma4n4d -m1ma2n -mmand5er1 -mmande2 -m3medi -mm2e -m2m2ed -mmel5li -mmell2 -mmet4e -m1me2t -mm2ig3 -mm2in3u -mmis3 -mmob3 -m1mo -m5m2oc -mmor3 -mmut3a -4m1n2 -mn2i1f4 -mn1i -m4nin -mn2i5o2 -mnis4 -mno5l -1mo -4mo. 
-2m2oc -mod1 -mod5ifie2 -mod2i1f -mogast4 -m2o1g -mo2ga -mo3ga4s -mo4go -mo2g5ri -mo1gr2 -m5o2ir -m2oi -mok4i -mol3a -4molog. -mo1lo -mo1l2o1g -4molo2g1s -4molo2gu -mo3ly -m2o1m -mo4mis -mom1i -m4on -mon1a4 -4m4o2n2ed -mo2ne -mo4n1g -mo4no -mono1lo4 -monolo3g5i -mono1l2o1g -m4op -mophil5i -mo5phi4l -m1o2p4t2 -m3or1ab -mor1a -m3orat4 -mor4a2to -m5ord -mo5rel -m2or2e -3mor2i1a2 -mor1i -m5or1iz -mor5on -3morp -3mor2se -mo2r1s2 -mor5tal -m2ort2 -m4o3sp -5most2 -m4o3sta -2m1ous -mo4u2 -m1p -m3pa -m4panc -mpa2n -m4pant -mp4a2th3 -mpel5li -mpell2 -m5per3er1 -m4p2er2e -mper1 -mp4er3i -mpet5it -m3pet -mpe2ti -mphal5o -mpha2 -m4phe4 -m4p4h2l -m2pi -mp5i2d -m5p2ig -mp3i2ly -mp2il -mp1in -m3p2ir -mp3is -m3po -mpol5it -mp2o2t -mpov5 -mp3to -m2pt2 -mp5tr2 -m3pu -m5q -m3r -m4r2y -4m1s -m5sel5f -m5si -ms2ol4 -mtu4 -muc4k4e -m4uc -muck1 -mu2ff4 -muf2 -mul1t2 -m5unc -mun1 -mu5n2io2 -mun2i -mun3is -mus5c2o2 -m2usc2 -mu4se -mus5ke2 -mus2k2 -mu3til -mut2i -m1v -m3w -2my -5my3c -my4d4 -my3e -3my1i -5mys1t4 -m4y2s -3myt -n1a -2na. -na2c -na2ch4 -na5cious. -n2a1c2i2 -nac2io2 -nacio4u2 -na5ciou2sl4 -nac4te -na2ct -nac5t2iva -nac2t1iv -na5cular1i -nacu1l2a -nacula2r -na4d4a -nadi4 -nad4op -na2do -n2ae. -naf1fi2l4 -na2ff -naf1fi -nag4a -n4a3gen -5n4a5geri -nager1 -na4g2i -n5ago -5n4a3gr2 -5n2ah -5nail -n2ai2 -na5iv -n2ak2 -4na2l2ed -n2ale -n5al1g2 -n4al2i1a2 -n2a3ly4 -1n2a2m -3nam2e -na4m4n2 -na5n2as -na2n -n1an2a -nann2ot4 -nan5no -nan1n4 -nan4t2a -nan5t4ed -nan2te -nan4t2o -n2a5o -4n4ard -na2r -nar5tisti -nart2 -nar2t1is -nartis2t2 -n2as -nas5i -nas5p -na4s3s2 -nas5te2 -nast2 -nat5al -n2a2ta -na5ta3t4 -n4atee -na3the2 -n4a2th -nat4h4l -nati4 -n4a4ti. -nat5i2c -n4a2to. -na2to -na3t2om -na4tos4 -n2a2t4r2 -na5tur1i -n2a2tu -natu4r -naugh5ti -n2au2 -nau2gh -naugh2t -naus3 -3naut -naut3i -n2a2v -na5vel -n3b4 -nbarric5 -nba2r -nb2a4r1r4 -nbar3ri -nbe2au4 -nb2e -n3bea2 -nbe4n -nb4e1ne4 -nbet4 -nbit4 -n1c2a -n4cal. -nc1al -ncarn5at -n1ca2r -ncar2n2 -ncarn1a -ncel4i -ncen4t5ri -n1cen2 -n3cent -ncen1tr2 -n4cept. -n1cep -nce2pt2 -n3cer1 -nc2er4e -n4c4es. -n5ce2t -n5cey -n3cha2 -n2ch -nch4ie2 -n3cho -nch5o1lo -n3chu -n4cic -n1c2i2 -ncid5en -nci2d -n4c2i3f -ncip5ie2 -nc2ip -n1c2l2 -n4cles -nc2le2 -n3c2oc4 -n1c2o2 -nco5pat -nco1pa -n1cr2 -n2c1t -nc4t1in -nct4ivi -nc2t1iv -nct2o -n1cu -ncu4lo -n4cun1 -n4curvi -n1cu4r -ncur1v2 -nc4us4t2 -n1cus -4nd -n2da -n3d2a4c -n3dal -n4d4ale -n3d2a2m -nd3anc -nda2n -nde2 -n3dea2 -nde3c2i2 -n3dec -n1d2ed -nde4l -ndeleg4 -nd2ele -nd3enc -ndepr2e4 -nde1p -nde1pr2 -n3derl -nder1 -nde4s -ndes5cr2 -n5de2sc2 -n5dez -nd4hi -n4d1h -n1dic -nd2ic5u -ndid5a -n1di2d -n3die2 -nd5i2ly -ndil4 -nd4in4e -n2d1in -nd3ise -ndi4s1 -nd5is4i -nd5ism. -ndi2s1m4 -n2d5ity -nd3ler1 -n2dl4 -nd1li -n5d2oc -n1do -nd2or4 -n2do4u2 -nd5ou4r -ndrag5 -ndr2 -n3dr2a2m4 -n5dron -ndu4b -n1du -nduc2t5iv -n3d4uc -ndu2ct -n4dun1 -nd2we -n2d1w -n3dy1i -n2d2y -2ne. -ne3alo -nea2 -n3ea2r -ne2b3u -ne1b -5neck1 -ne4cl2 -ne2c2o2 -n5ec1t2om -ne2ct -2n2ed -3nedi -ne4du4 -neg3a -ne3go -5negu -n4eis4 -2n2ele -ne5l2i1a2 -ne1l2i4g -n4e2ly -ne2mo -4n1en -n3e4nd -neo3l -neon4 -ne2p -n1er1 -4n4e2r4ed -n2er2e -5neri4ng -ner3in -ner5o -ne4r4r5 -ner2v2 -ner2v5in -2n4es. -n1e2sc2 -ne3s2i1a2 -1ne4ss -n1est2 -nes3t4r2 -net3a -net3ic -ne4t2o1g -net1r2 -neuma5to -n4eum -neu1ma -neut5r2 -nev5er1 -n4ew -news3 -n4eys. 
-ne4y3s2 -n3f -n1fo4 -nform5er1 -n5f2orm -nform2e -nfor1tu5 -nf2ort2 -nfra2n3 -n1fr2 -4ng -ng2a -n4gae -n5gee -n3geri -nger1 -n5gero -n2gh4 -n2g2i -n5gic -ngi2o4g -n3g2io2 -n5glem -n2gl2 -n3glie2 -n5gl2io2 -n2g1n2 -n1go -n4g2r2y -n1gr2 -n1gu -n2gum -n1h2 -nh1ab3 -nha2 -nho4 -nh2y2 -nhy1d5 -n1i -4ni. -3niac -n2i1a2 -ni3ba -n4ic1ab -ni4cen2 -n4ice -4nicl2 -nic2t5a -ni2ct -ni4cul4 -n2icu -ni4d2i1o2 -ni2d -n2ie2 -ni4e2r1s2 -ni1er1 -ni2f4f -n2i1f -ni2ft4 -nif5ti -n2i2g -night5i -ni2gh -nigh2t -n3i2g1m4 -3ni2gn2 -nik5e -n2ik2 -n2il -ni2l4a -n3im1 -n4im2e -5ni1me2t -n4ine4s -nin4j -5n2in1n4 -n4in2u -5n4i1ol -n2io2 -ni1o4u2 -3nipu -n2ip -5n2iq -n4is. -n4is2k2 -ni2s4l4 -nis4o -n5i4s1s -nis5ter. -nist2 -niste2 -nister1 -nis5te2r1s2 -nit2ch4 -ni2tc2 -ni4te -ni3tho -n2ith -n4itos -ni5tra -n2itr2 -nit5res -nitr2e -ni3tri -nit4u4r -n2iv -n2iv4a -ni3vo -niv2oc4 -niz5en -n1iz -n1j -nj2a2m2 -n1ja -njur5i -nju4r -4n2k -nk5a2r -n5ker5o -nker1 -n3key -nk5i1f -nk5i2l -4n1l2 -nla4n2d5 -nl2a -nla2n -n3le -n1let4 -n3m -nm4a3ter4 -nmor5ti -n1mo -nm2ort2 -n1n4 -nne4 -nnel5li -nnell2 -nnerv5a -nn1er1 -nner2v2 -n3n1i -nni3ki -nn2ik2 -nnov3 -n5ny1i -n2ny2 -4no. -n5ob2i -no5b1il -no2b4l2 -no5blem -nobser4 -no4b1s2 -n5ocu1l2a -n2oc -no4di -n4o2d2y -noe4c -no4f2a -nois5i -n2oi -n4ois -n5ol. -no3l2a -nol4i -no2m3al -n2om -no1ma -1nom1i -no2mo -4no2ne -3n2oni4c -non1i -5nood -n2oo -nop5i -nora4t -nor1a -nor5di -nor4i1a2 -nor1i -nor4is -nor3ma -n2orm -n4oro -n2or4t2 -n4os. -nos4o -n4o3sp -not1a -n2ot -3note -n1o4u2 -n4oug4 -3n2oun1 -2nous -nou5v2 -nova4l -nove2 -no2v3el -no4v2el5e -n4ow -now5er1 -now3l -n3p4 -np2il4 -n1pl2a4 -np2l2 -npoin4 -n1po -np2oi -npo5l2a -n3p4os4 -npri4 -n1pr2 -n1q -n4que1f -nqu4 -n1r -nre4i4 -nr2e -nre3m -nres5t4r2 -nrest2 -4n1s -ns2c2 -n2s2c2o2 -ns3c2ot -n4s1cu -n5sec -nsec4te -nse2ct -n2s2es -n5seu -n3s2h2 -n2si -n4s3ib -n4sic -n5s2ick1 -n3s4i2d -n3sie2 -ns5ifi -ns2i1f4 -ns3i4ng -n2sin -n3s2io4 -n3s2is2 -ns1i2t -n4s3iv -nsolu4 -ns2ol -n5son -n4s2or2e -n4s3o2r2y -n3sp2ir -n3s2t2 -nsta4 -nstil4 -n3su -nsur4e -nsu4r -n3swa -nsw2 -ntab4u -n2t1ab -nt3age -nt1al -n4t3anc -nta2n -nt5a4nd -ntan5eo -nt4ane -n4t3ant -nt4ar2i1u2 -nta2r -ntar1i -n5tas1is2 -ntas3i -nt3as4t2 -nt1at -nt5a2th -nt3ati -nt5a2t1iv -n5t2au2 -n1te -n4tec -n4tee. -n4tees -n3te2l -ntend5en -nt2e4nd -ntende2 -n4te2o -n4ter. -nter1 -n3teri -n5ter2n2 -ntern5al -ntern1a -nter5nat -nth2 -n1the2 -nt4her5 -nth5in2e -nth1in -nt2i -n2t4ib -n4t3ic. -nt1ic -n5ticis -nt2i1c2i2 -n5tic1iz -n4ti4c5s4 -nt2ic4u4 -n3ti2d4 -n1tie2 -n4tify. -n1t2i1f2 -nti3fy -n3t2ig -nt5ilati -nti2l3a -n5t4ill2 -nt3i4ng -nt1in -nt5ing. -n1t2i3p -n4ti1pa2r -n4tis. 
-n2t1is -nt3i2s1m4 -nt3is2t2 -n5ti1t2io2 -nt3iz -n2tj -n1t2o -n3t2om -nton1i4 -n5top -n1tr2 -ntr2a3d -nt3ral -n4tran2t -ntra2n -n3trat -nt5re4s4s -ntr2e -n2t3ril -ntrol5ler1 -ntrolle2 -ntroll2 -n5trym -n2t4r2y -n1tu -n3t2u1a2 -ntub5 -ntup5li -nt2up -ntup2l2 -n5tu4r -n2ty -n2u -n2u1a2 -5n4uc -3nud -nud5i -nu3en -nug4a -n2u3i2 -nu4is -5nuk -n4u1lo -n3ult -nul1tim5 -nu1m2e -5numenta -numen4t -nu1men -5numer1 -5nu1mi -3nunc -nun1 -nu3tat -n5u2t1iv -nut2i -nu4t2o -nu1tr2 -n3v2 -nve2 -n2vel3 -nv4e1n4e -nven5o -nver2s5a2n -nver1 -nve2r1s2 -nvi4t -nvoc5at -n1vo -nv2oc -nvoc2a -n5w -nwin4 -nwi2 -n3w2om4 -n1wo -n2x4 -2ny2 -5nyc -nym5i2t -nyth4 -n1z2 -nzy4 -2oa2 -o5ace -o3a2ct -oad5er1 -oad5i -o3ag -oak5er1 -o2ak -o3ales -o2ale -oal4i -oal5in -o5al1it -oan4t -oa2n -oap5i -o2ap -oar5er1 -oa2r -oar2e -oar4se -oa2r2s2 -oast5er1 -oast2 -oaste2 -o2a2t5a -oat5ee -o4at5er1 -4oba -o1be4l -ob2e -ob2i -ob3i4ng -2o1b2i3o2 -ob3it -o3bl2a -o2bl2 -ob1li -4obo2 -ob3oc -o5bol -o5b2ot -o3bra -obr4 -obr2om4 -o2b5t -ob3ul -o3bus -2oc -oc2a -o4c1ab -o3cad -oc5ag -o5cal1li -oc1al -ocall2 -o4c5a2t1iv -oc1at -oc5a2to -4o3ce2 -o4cea2 -ocen5o -o1cen2 -ocess4i -o5ce4ss -och4e2 -o2ch -och5in -o3ch2o4n -o1cho -ochro4n -o3c2hr -o5chu -oci3ab -o1c2i2 -oc2i1a2 -oci4al -o1cl2 -o2c2le2 -o1cr2 -oc2r2e3 -o2ct2 -oc2te -oc1to -ocu4lu -ocum4 -oc5uo -ocu4ss4 -o1cus -ocus5si -oc3ut5r2 -o1cy -o5cy4t -ocyt5o -od3al. -o3de4c -o5de3g -ode4ga -o5d2e4n4d -o3dent -odes4 -od3ica -o4d1ie2 -od3iga -od2ig -od4il4 -od1i4s2 -o3d2is5i1a2 -od5it -5od1iz -od3li -o2dl4 -o2do -od5o1lo -o2d5ous -odo4u2 -o3dro -odr2 -od5ru -o2du -odu5cer1 -o3d4uc -o4duct. -odu2ct -o4duc4t1s -od3ul -o5dyt -o2d2y -oe3a2 -oe4b2i -oe1b -oe5cu -o2e4d -o5ee -oe5ic -o3elec -o2ele -oel1li4 -oell2 -oelo4 -oe3o4p -oep5 -o5eq -o3er1 -oes3t2 -o1et -o4et. -oet3i -oet4r2 -3oeu -o3ev -o3ex -o3flu4 -ofl2 -4o1fo -o4ful -o3fu -ofun4 -2o1g -o2ga -o3g2a2m -og5a2r5 -o3ga4s -o3gen1 -o5gey -o3g2i -o4g2io2 -og2n1a -o2gn2 -ogon1i4 -o4g2ot -o2gri -o1gr2 -o4g4ro -og4s2h -o2g1s -o2gu -o5gyr -o2gy -o1h2 -o3ha2 -oh1ab3 -o3he2 -oher4er1 -oher1 -oh2er2e -o3ho4 -oh2y4 -2oi -oi4c -o3ic. -o4i5ch -o2i4d -4oide -o2ig4 -oi5ki5 -o2ik2 -oi2l3er1 -oil5i -oin3de2 -o2i4nd -o3i4ng -oin4t5er1 -oin1te -oin4tr2 -o2i4o2 -4ois -o3i2s1m4 -oi4t -oit4al -oit1a -o2ith4 -o1j -ok4ine -ok1in -ok3l2 -ok5u -ol4a4c -ol2a -o4lack1 -o5lali -ol4al -ol4a2n -ola4n5d -ol5ast2 -ol4as -ol4at5er1 -ola4te -ol5ch -ol1c2 -ole2c4 -ol5e1c2i2 -ol5efi -ole1f -o3leo -ole4on -o3lep -o2l1er1 -o3lest2 -o3leu -o1l2i1a2 -ol3ica -o3l4ice -ol5ic1iz -oli1c2i2 -ol5ick1 -ol3i4c5s4 -ol5id. -oli2d -oli2e2 -o3li1er1 -ol5i4es. -o5l2i1f -ol4i4f3e -oli5go -o1l2ig -o5lin1a -ol3i4ng -oli5os -ol2io2 -ol5ip4 -ol1is4 -ol2it -olle2 -oll2 -ollim3 -ol1li -ol4lop4e -ol1lo -ol4ly1i -ol2ly -ol3mi -olm2 -o1lo -4o1l2oc -ol3o2i4d -ol2oi -o4lon1a -ol5on5el -olo2n4e -ol1or -o3los -ol1o4u2 -4ol1ub -o3lu1mi -o5lun1te -olun1 -ol3us. -oly3ph -o2ly -ol2yp2 -4olyt -2om -o1ma -o4m1ab -o2mac -o2mal -o4mane -o1ma2n -o3mas1t4 -o3mat -om4b2e -o2m1b -o2m2e4d -om2e -ome4g -o5meg5a1 -ome3li -o2m3en1a -o1men -omen4t -o3meri -omer1 -om1i -o3m2i1a2 -om1i2c -om2i1c5r2 -om4ie. -omie2 -om2il4 -om4i2ny2 -om2i4s1s4 -om2i2t -omm2e4 -o2m1m2 -om2n1a -o4m1n2 -omn1i3 -o4m2oi -o1mo -omoli3 -o2m4o4n -om5o2ny2 -o4mos. -omo2t5iv -om2ot -o2mo4u2 -om5p2il -om1p -om2pi -ompt5er1 -om2pt2 -ona4d -on1a -on3ai2 -o5nas. 
-on2as -onast5i -onast2 -on5a2t1iv -onati4 -4on2au2 -on1c -onc1at3 -on1c2a -on4cho -on2ch -5ond5a2r -o4nd -on2da -ond5ent -onde2 -on3der1 -on3dr2 -on5d2y -o2ne -4onea2 -onec4r2 -4o2n2ed -on1ee -on5ell2 -o3neo -o1n3e4ss -on1et -ong3at -o4ng -ong2a -on4gu -4on1h2 -4o1n2i1a2 -on1i -on5ia2r -2oni4c -onic5a -oni4c1al4 -on4i2d -on3ies -on2ie2 -on3i1f -o5n2i2g -o1n2io2 -on4k4s -o4n2k -4onnes -on1n4 -onne4 -on5o4di -on5oi -o1no4m1i -on2om -4o5nom1i2c -ono3s -o5not1a -on2ot -o4n1s2 -2ont -ont5a2ne. -ont4ane -onta2n -on4ter1 -on1te -onti5fi -ont2i -on1t2i1f2 -onton5 -on1t2o -on1t4r2 -on4tr2e -on5u4r -on2u -o5nus -onvo5lu -on3v2 -on1vo -onvo2l -on2z2 -2oo -oof3er1 -o2o1i -ook3er1 -ook3i -oo4le -ool5i2e2 -o2o4m -oon3i -oo2p -oop4ie2 -oop1i -o3o2p1t2 -oo4se -oost5er1 -oost2 -ooste2 -o2o2t -oot3er1 -ooz5er1 -o2oz -o1pa -o4p1ab -o5pali -opa5ra -o1pa2r -op4a2th5 -o5pec -ope4n2s4 -op1er1 -3opera -4opera4g -o1pha2 -o4phe4 -oph4ie2 -o5phi4l -op5hol -o1pho -o1ph2y -ophy5l2a -o3phy2l -op1i -op3ies -op2ie2 -op5i4ng -o3p2it -4op2l2 -oplast4 -o1pl2a -opl4as -o4p2oi -o1po -opol3i -op4o2n4 -o2p5o2ny2 -op5or1i -op2oun4 -opo4u2 -o2p5ov -op2p2l2 -o4p1p2 -op5pli -oprac4 -o1pr2 -op3ra2n -opr2e4 -opro4l -op5r2op -op5so -o2ps2 -1o2p1t2 -op2ta -op1u -o5qui3al -oqu4 -oq2ui2 -oqu2i1a2 -or1a -or5a2do -or2ad -ora4g -o5r2ai2 -or5al -4ora4ls -or2a2m4 -oran3e -ora2n -orat1or5 -ora2to -or2b3in -or1b -orb2i -or4ch -or1c2 -orch3i -or4du -2or2e -or5ead -orea4 -ore5a2r -ore5c2a -ore3f -ore3g -or3ei4 -oreo5l -or3e2sc2 -ore3s2h -or3e4ss -orest5at -orest2 -or5este2 -or5e2t2t4 -ore4v -5orex -or4fr2 -or1f -or5gn2 -or1g2 -or1i -4o4ri. -or3ia. -or2i1a2 -4oria2n -ori4c2i2 -ori5ci2d -or2i1en4 -orie2 -or3i1f -5or2ig -ori5ga -or4i4no -4orio. -or2i3o2 -or5ion -4orios -ork5a -or2k -2orm -orm1i -or3n4a -or2n2 -5or1nis -orn1i -or3nit -or3o2ne -o5r2oo4 -or5ose -or5oso -or1o4u2 -orre2l3 -o4r1r4 -orr2e -orres3 -or4sc2 -o2r1s2 -or4sey -or2se -or4sti -orst2 -2ort2 -ort3a2n -ort3at -ort3er1 -or5t4es. -or2t2es -or3th2i -or4t2h2r -or4tit -or2t3iz -or4t1or -or1t4o -or5tra -ortr2 -ort3r2e -4or1u -or4un1 -or2y5p -o2r2y -o3s2a5i2 -os3a2l -osa2r5 -o1sc2 -os4ca -os4ce -o2s2ch2 -o4s1c2i2 -osclero5s4 -oscl2 -osc2le2 -osc2ler1 -o3sec -osec3u -ose5g -os5enc -o3se4n2s4 -os5eo -oser4 -o2set -os5eu -o3s2i1a2 -osi4al -osi4a2n -os5i4de -os4i2d -o3si1er1 -osie2 -os5i1f4 -o2s1in -o4s1is2 -o5ske2 -os2k2 -o5son -o3soph -os2o2p -os3o1po -4osp -o3spec -os1pi -os4sa -o4ss -oss5a2r -os4s2it -4osta -ost2 -ost5age -os4ta2r -os5tee -oste2 -os5ten -osten5t -ost5ica -ost1ic -os3til -o5stom2e -ost2om -ost3or -4osu -os1u4r -2ot -ot3a4g -o5tal1it -ot3a2m -ot4an3ic -ota2n -otan1i -o3t2ap -ot4a1t2io2 -o5t2a5v -o3t4a4x -o4t4ed -oter4m -oter1 -ot5est3a -o2t2es -otes4t2 -4oth -othal2a2m5 -otha4 -othal2a -oth5er3in -ot4her1 -othe2 -o5therm -otherm5a -o5thor -o5t2i2a2 -o5t4ill2 -5ot5in1iz -ot1in -ot2in1i -o2t4iv -o3t2iva -o5tivi -o1t2o -o5to2n4e -o4tor2n2 -ot1or -o4to4u2 -4o1tr2 -otur1i4 -otu4r -oty3le2 -o4u2 -5o2u3a2 -oub2 -ou5br4 -ou5c2a -o4uc -ou5c2o2 -oud5i -4oue -ou3e4t -oug4 -ou5ga -ought5i -ou2gh -ough2t -ou5g2i -oul4t -oult5i -ou3m -2oun1 -ou4n2d -oun2d5a -ound5e4l -ounde2 -oun5gin -ou4ng -oun2g2i -oun3tr2 -oup5li -o2up -oup2l2 -our3er1 -ou4r -our2e -ou5sa2n -2ouse -5ous2i1a2 -ou4ss4 -out5is2h -out2i -ou2t1is -ou4v5a -ouv2 -o1v2a3le -o5va2r -4ovati -ov5el3i4ng -o2vel -o4ver. 
-over1 -o5ver3b -ove2r3s2 -ov4ete -ovid5en -ovi2d -o1vis -ovis5o -o2v5os -o1vo -ow3ag -ow3a2n -o5w4ay -owd4i -ow2d3l4 -ow1el -owel5li -owell2 -ow5ha2 -owh2 -owh2ith4 -ow1i2 -ow5in -owi5n2e -ows4 -ow5s2h -ow5sl4 -ow5y2 -o4x -ox3i -oxic5ol -oxi4c -ox2i2c2o2 -ox5o -2oy -oy5a2 -o4y2s4 -2oz -o1zo -oz2o5i -o3zy1g -4pa. -pac4te -pa2ct -pa5do4u2 -pa2do -pad4r2 -pae4s4 -pa3ga2n -4pag1at -pag4ati -pain2 -p2ai2 -4pairm -pa2ir -pa5la2n -pal2a -pal3in -pa3lo -p4a4ls -pan5a2c -pa2n -pan2a -pan1e -pan3i -p4a4pa -p2ap -pa3pe -pap3u -pa3p4y -1pa2r -para5s2 -par3l -pa3r2oc -pa3rol -par5o4n -1p4as -pass5ive -pa4ss2 -pas4s1iv -pas1t2 -pas4t1in -p4a3ter1 -pa2t3i4n -p5a2to -pat4ric -p2a2tr2 -pat1ri -pa5tric2i1a2 -patri1c2i2 -5p2au2 -paul5e -pa2u3p -pa5vi4l -p2av -5paw -pawk4 -paw5ki -2p1b -p1c4 -p5d2 -2pe. -pear4l5i -pea2 -pea2r -pe4c2o2 -pec4tu -pe2ct -2p2ed -5ped3a -3pede2 -3pedi -ped3i4s1 -3pe4d1s2 -pe2du -p4ee -pe2f -4p2ele -pe5le3o -pel5v4 -pen4at -pen1a -5p4enc -pend5er1 -p2e4nd -pende2 -pen5dr2 -pen4ic -pen1i -3p4en1n4 -pens5ati -pe4n2s -pens2a1t -p4en5u -pe5on -5p2er1c2 -per1 -per3cent5 -per1cen2 -4p2er2e -perem5i -p4eri -5p4er3n2 -p3eron -per4os. -per5t1in -pert2 -per2t5is -per3v2 -p4e2r2y -2pes -pe4s4s3 -pes5til -pe2sti -pest2 -3pet -pet5all2 -pet2a -pet3en -pe2ti -pet3r2 -pe4wa -4pex -p1f -p5g -2ph. -4phae -pha2 -pha5g2e4d5 -ph5al. -ph2a2n -phant5i -phan4t -phe4 -ph5esi -ph3et1 -3phib -4ph1ic -1phi4l -ph1i4n -ph1is -ph2i5th -p4h2l -1pho -4pho2bl2 -4ph4o2n2ed -ph2o4n -pho2ne -3phor -ph5or1iz -phor1i -ph4os3p -ph3o4u2 -3phra -p2hr -4p2h1s -1phu -ph2u5i2 -2phy. -ph2y -3phy2l -4pi. -3pia2r -p2i1a2 -4pica -p5i4c1al -p2i3c2o2 -p2i4cr2 -pi2ct4 -p2ie2 -p4i1es2t2 -pi5eti -p2i1et -p5ifie2 -p2i1f -pi2g3n2 -p2ig -p2il -3pile -pil2l5in -pill2 -pil1li -5pilo -pi3l2ot -pim2 -pin4e -pin5et -3p4inge -pi4ng -p4in1n4 -5p4i4n1s2 -3p2i1o2 -pip4a -p2ip -pi4pe -5p2iq -pir5a4c -p2ir -pir1a -pir4t2 -p4is. -p4i2s3c2 -p2i4s2s -pis1s5a -pis5til -pist2 -pis4tr2 -p2itu -2p3k2 -p2l2 -1pl2a -pla5n1o -pla2n -plant5er1 -plan2te -plas5t2i1c2i2 -pl4as -plast2 -plast1ic -pla5t4o -4p4le. -4pled. -p2l2ed -3pleg -3plen -2ples -4pli2s1m4 -pl1is -4plist2 -plu2m -plum4b2e -plu2m1b -plumb5er1 -p4ly -2p1m -2pn -pnos4 -1po -4po. -po3c2a -p2oc -3pod -4pof -2p5o2i4d -p2oi -pois5i -p4ois -po5lem1ic -po4ly1 -pol4y3s -po1ma2n5 -p2om -po1ma -pom4e -p4o2n -pon4a2c -pon1a -pon4ce -pon1c -pon4i4e2 -pon1i -3pon3i1f -pon5ta -p2ont -2po2ny2 -po4pa -po5ple -p4op2l2 -4pora2to -por1a -por3ea4 -p2or2e -4po2r4ed -por3i4f -por1i -por3p -3p2ort2 -por5tie2 -3p4os -po4s1s2 -po1te -p2ot -poul1t5e -po4u2 -poul4t -pound5er1 -p2oun1 -pou4n2d -pounde2 -pout5er1 -p5ox3i -po4x -5p2oy -4p1p2 -p1pa2r3 -ppar1at5 -p4p4ene -p3pet3 -pph4 -ppi4c -p4p2l2ed -pp2l2 -p5p2ler1 -p5p1let -ppres2s5o -p1pr2 -ppr2e -ppre4ss -ppr4ob5a -1pr2 -prac1 -pr2a5d -pra2r4 -4p4re. -pr2e -pre1b3 -p2r4e1d -pr2ef5er2e -pre2fe -pre1f -prefer1 -prel5a4te -pre1l2a -3prem -pre5mat -pren3 -pres3a -pre5scin -pre2sc2 -pres1c2i2 -p3rese -5pressi -pre4ss -5pri1c2i2 -pri4es -prie2 -4pri4m -pring5er1 -pr4inge -pri4ng -pr4in2g5i -4pr2i3o2 -p5r4i1ol -pri4os -pri2s5in -pr2i4v2 -4pr2iva -4p4ro. -pr4o3bo2 -p3roc3a -pr2oc -pro4ch -pro1l -pron4a -pro4ph5e4 -pr2op -pro3pyl5 -pro2p4y -pro3r2 -pros4i -pros5tr2 -prost2 -pr4o3th -pr2ot -4p2r2y -2ps2 -p3sac -ps4al5t -psa2l -p3s2h -p1si -p5sin. 
-p2sin -p1s2o3m -p1st2 -psul3i -p1s2ul -3psy1c -ps2y -2pt2 -p2t3ab -p4tad -p4ta2n -p2ta2r -pt5ar1c2 -p1t4ed -p5ten1a -pt5en1n4 -5pte2r2y -pter1 -p5tet -pt4ic -p5tie2 -p3til -p2t3in -pt4ine -p3tise -p2t1is -p5tisi -p5t2om -p4tr2 -p1tu -pub1 -pu5b2e -p4uc4 -pu4ch4 -pudi4c -pu5er1 -puff5er1 -puf2 -pu2ff -puf1fe -pu4la2r -pu1l2a -pu5lar. -pu5l1is -pul2i -p4u4m -pu1m4o -p4un1 -pu4n4a4 -3punc -pun5g2i -pu4ng -pun3i -pun2t -pu3pi -p2up -pur5b -pu4r -pur3c2 -p4us -push4ie2 -pus2h -pu3tat -p5u5t1is -put2i -pu3tr2 -4p1w -2p4y -py3e -3py1g -3pyl -pyr3e -py5t -4qf -qu4 -5qu2ak -q2ua2 -4qua2r -qua5t2io2 -2que. -3quer3a -quer1 -4qu2er2e -4qu4es. -1que4t -5quin1a -q2ui2 -5qu2ir -3quito -4quitu -4ra. -r2a3ba -r1ab -5r2ab2e4 -3r2a3bin -r2abo2 -ra3bol -rac4a -r2acu -rac5u1l2a -ra5cu1lo -r2ad -ra4de -rad4in4e -ra2d1in -ra2g5o4u2 -ra3gr2 -3raill2 -r2ai2 -ra5ist2 -4ral2i1a2 -r2a3ly4 -r5a4m1n2 -r2a2m -ra3mu -r4andi -ra2n -ra4nd -ran5dis2h -randi4s1 -ran4du -ra5nee -ran4gen -ra4ng -ra3n2i1a2 -ran1i -ra3n2oi -ran1o -ran2t -ran5t4ed -ran2te -5ran3te2l -rant5in -rant2i -ran1t5o -rapol5 -r2ap -ra1po -rap5to -ra2pt2 -4rar1c2 -ra2r -rar2e2 -rar3e1f -rar5ia. -rar1i -rar2i1a2 -ras2 -r2as3c2 -r2as2e -r4as2k2 -r2a3so -ras1s5a -ra4ss2 -ras2s5in -r4as5te2 -rast2 -ra5t2a3p -r2a2ta -ra5ta3t4 -rat5eu -rath4e2 -r4a2th -ra2t3i1f2 -rat4in. -ra2t3in -ra5t2oc -ra2to -5r2a5tol -4r4at2om -ra4tos4 -ra5t2u1i2 -r2a2tu -rat5u4m -rat3u4r -rav5ai2 -r2av -rav5eli -ra2vel -rav3i2t -rawn4 -ra3z2ie2 -r2az2 -raz1i -r1b -r2ba -r4bag -rb3ali -rb1a2n -rba2r3 -r2b2e -rbe5c -r3bel -rbel5o -rb3ent -r4be1s2 -rb2i -rbic4 -rb2ic5u -r2bin -r5bine -rbit1 -r2bos -rbo2 -r4bum -rbu5t4 -r1c2 -rcant5 -rca2n -rca4s -r4c2ele -rce2n5er1 -r1cen2 -rc4ene -rcen5ten1a -r3cent -rcen1te -r2ces -r3ch2a3i2 -r2ch -rcha2 -rch3al -rch5ar4d -rcha2r -rch5ate -r3cheo -rche2 -r4ch1er1 -rch4i1er1 -rchie2 -r4ch1in -rch3is -r3chit -r3cil4 -r1c2i2 -rci5n2o1g -rc2ino -rcis2 -rciz4i2 -rc1iz -r2cl2 -r4c2le2 -r5clo -rco1lo4 -r1c2o2 -rcrit5 -rcr2 -rcriti4 -r2ct4 -rc5ti -r5d2a2m -r4d1a2n4 -rd4an. -r2da2r -r5de4l -r3de4n2s -r4des -rd5e4ss -rd5ia2n -r1d2i1a2 -r4die2 -r5d2ig -r2d2in -rd3i4ng -r3d2i3o2 -rd1i4s2 -rd5ler1 -r2dl4 -rd3li -r4dol -r1do -r2d5ous -rdo4u2 -r2e -4re. -rea4 -r4ea. -reac2t5iv -re1a2ct -re3af -re3a4g -re5alt -re5a2m1b -re2a2m -re3an5i -rea2n -re5ant -re5asc2 -re2as3o -r5e2au3 -3re2av -r5ebrate -re1b -re2br4 -reb1ra -re4b5uc -re3c1al -rec2a -rec4ce -rec1c4 -re3ce -reced5en -re2c2ed -re3cede2 -re3cha2 -re2ch -reci5si4 -re1c2i2 -r4e1c2r2 -rec4t3r2 -re2ct -re3cu -2r4ed -re1de2 -re3di4s1 -re4dol -re1do -re1dr2 -reed5i -re2ed -ree3m -3reer1 -re2fe -re1f -re3fin -re5gali -re5gra -re2gr2 -r2e3g4r2e -reg3ri -re3g4ro -reg3ul -rei4 -r4e3i1f -re1in -r4e3is -reit3 -reit4i -re1l2a -r2e1le -4r4e4l2ed -re3l2i1a2 -rel3ic -re5l2ig -rel2i4q -rel3li -rell2 -r5em. -rem5ac -rema4n4d -re1ma2n -rem5a2to -r3em1p -rem5ul -ren1a4 -ren5at -r4endi -r2e4nd -r4ene2 -ren4es -r4en1i -ren3ic5 -ren4it -ren4ter1 -ren1te -re5num -r4en2u -re3oc -3re2o1g -re5ol2a -reo2l -re3oli -3reo1s2 -re1pe -re4per1 -re5ph1 -rep5i2d -re3pin -re3ple -rep2l2 -r2e4pr2e -re1pr2 -re1q -rer4a -rer1 -r2er2e4 -re5rea4 -r2e3r2u -2r4es. 
-re3scr2 -re2sc2 -re3se4l -re3sem -re3ser1 -res5ist2 -res1is2 -re5s1it -re3spe -r3esq -re5stal -rest2 -rest5er1 -reste2 -re5s1tu -3reta2r -ret2a -re3ten -re4t4er3 -re5term -re1t2o -re5ton -re3tra -retr2 -r2e3tr2e -re5t1ri -re3tu -re3un1 -reu4r4 -re1v -re2v3el -revi4t -r1f -rf4l2 -rfu4m -r3fu -r1g2 -r4gag -rgal4 -r2ge -r5gee -r4g4ene -r3gen -r3ge4o -r3ger1 -rg5li -r2gl2 -rgu5f2 -rh2 -r5hel4 -rhe2 -rhe5o2l -rhos4 -3r2h2y -4ri. -ri3a2m -r2i1a2 -ri5ap -2r2ib -r4i3bo2 -ric2a5t4u -ric1at -2r4ice -rich5om -r4i2ch -ri1cho -rick4en -r2ick1 -ric4ke -r4icl2 -ri5cli -ri3col -r2i2c2o2 -ri5cor -ri4cra -r2i1cr2 -2r2icu -rid4al -ri2d -rid1a -rid4e -ri5el -rie2 -ri3er1 -ri2es -rift5er1 -r2i1f -ri2ft -rif5tie2 -5rifug4a -ri3fu -ri5g2a2m -r2ig -rig5ant -riga2n -ri5l4a -r4ile -rill5er. -rill2 -ril2ler1 -rill5in2g1s -ril1li -ril2lin -rilli4ng -4rim. -ri2ma -rim2a4g -ri1m5a2n4 -rim3at -r4i2m1b -ri1men4 -rim2e -4ri2m1m2 -4ri4m1s -rin4e -r4inet -ring5ie2 -r4in2g2i -ri4ng -rink5er1 -r4i4n2k -r4ino -ri4n4s2 -rin3s5i -rin4t5er1 -rin1te -r2i3o2 -ri2o4g -5rio2ne -ri4op -ri5or -ri5p2a -r2ip -ri5p2ie2 -rip5lica -rip2l2 -r2i5r -ri2s4c2 -r4is4is2 -r2is1p -ris4pa -ris4pe -ris5ter1 -rist2 -riste2 -4risti -r2i3ton -r5it5r2 -r2i4v -riv4al -r2iva -ri5vall2 -riv5eli -ri2vel -riv3en -riv3i4l -5r2i5zo -r1iz -r1j -r2k -r5kas -rk5ati -r5kell2 -rk5en1i -rk1er1 -r3ket -r3key -r3ki1er1 -rkie2 -r5ki1es2t2 -r5k2in. -rk1in -r5k2i4n1s2 -rks4m2e -r4k1s -rk2s1m4 -r1l2a -rlat3 -r1le -r3l4ic -r3l4ine -r5li4n1s2 -r4l1it -r1lo -r3mac -rma5ce -r5mad -r2mal -r4manc -r1ma2n -r4man1o -r4mar1i -r1ma2r -r4ma2r2y -rm4as -r4m3ati -rma5t2oc -rma2to -r5m2a5tol -rme2a2 -rm2e -r2m1ic -rm4ica -r5m2i2d -rm4ie2 -r5m2ig -rm2il5 -rmin4e -rm3i4ng -r4ming. -r4mi4te. -rmi2t -r3m2oc -r1mo -rmol4 -r1mu -rmu3l2i -r2n2 -rn3ab -rn1a -r3na2c -r5nad -rn5a2r -rn3ate -rn5a2t3in -rnati4 -rn5e2dl4 -r2n2ed -r3nel -r3ne4ss -rn5est2 -r3net -r3ney -r5n2i1a2 -rn1i -rn5ib -r3nic -rn3in -rn4ine -r1nis -rn3ist2 -rn2i5v -rn3iz -rn5n4 -r3n2oc -r5n2o1g -rnt4 -r5n4uc4 -rn2u -r5nut -4ro. -ro4b2e -rob3le -ro2bl2 -ro5br4 -5roc1c4 -r2oc -ro3cu -r2od -ro3do -ro3dy4n1 -ro2d2y -ro1fe -ro3gn2 -r2o1g -4r2oi -ro3i4c -ro2i4d3 -ro3l2a -r4o2l2ed -rol5ite -rol2it -ro3ly -romant4 -r2om -ro1ma -ro1ma2n -ro5mel -rom2e -ro3m2i2t -rom1i -romole2c5 -ro1mo -rom4p -ro3mu -ron4a2c -ron1a -4ronal -ro5nate -ron5ch -ron1c -ron4do -ro4nd -ron2g5i -ro4ng -r5onm2e -ron3m -ro1no -ron4ton5 -r2ont -ron1t2o -r2oo4 -1ro2o4m -5ro2o2t -r2op -4rop. -ro3pel -rop4in4e -rop1i -r4o1pr2 -r5opte -r1o2p1t2 -ror5d -4r2or2e -r4osa -ro3s2i4a2 -ro5s2ol -4ro4ss -ro5stat -r4osta -rost2 -ros4ti -ros5tit -ro3tat -r2ot -ro1te -ro4ter1 -ro3tu -5r4oue -ro4u2 -roul3 -round5er1 -r2oun1 -rou4n2d -rounde2 -rou5sel -r2ouse -4rou4ss4 -r4out -r4ow -row3er1 -4ro4x -rpas2s5in -r1p4as -rpa4ss2 -rp3at -rpe2 -r3pent -rp5er. -rper1 -r2ph -rph5e4 -r3phol -r1pho -rp3i4ng -rp5is -rpol3a -r1po -r2p5o4u2 -rpr2e4 -r1pr2 -rpre4t5er3 -r3pu -r1q -4r1r4 -rr2a4h -rran5g2i -rra2n -rra4ng -rr2ap4 -rre2l -rr2e -r4reo4 -rrhe3 -rrh2 -r3ri -rric4 -r2r2icu4 -rri4fy. -rr2i1f -rri3fy -rr4in5ge -rri4ng -rri4os -rr2i3o2 -rrob3 -rr2o1g5 -rr2o4t -r5ru -r2r2y5 -r3ry1i -r3rym -2r1s2 -r4sa4g -r2sa2l -r5sal1is -r5sal1iz -r2sa2n -r4sa2r -r2se -r3se2a2 -r3sec -rsel4 -rsell5 -rs3er. 
-rser1 -2r1s3e2r1s2 -r3set -r3sha2 -rs2h -r3shi -r4shie2 -r5s2i2a2 -r4s3ib -r5sie2 -r4sil -rs3i4ng -r2sin -r3s2io4 -r4s1it -r4s3iv -rs5li -r2sl4 -rst1or4 -rst2 -rstrat4 -rstr2 -r3su -r4sus -rswea2r4 -rsw2 -rswea2 -rt2 -r2t3ab -rta4g -rt3age -r3ta2r -r4tar2e -r2t3c2 -r1t4ed -r4te2dl4 -r3te2l4 -r5t2e4nd -rt3en1i -r5ter3er1 -r2t2er2e4 -rter1 -r5tet -r5teu -r4th4ene -rthe2 -rth2i -rth5ing. -rth1in -rthi4ng -rth3ri4 -r1t2h2r -r1t4ic -r4ticl2 -r5t2i1et -rtie2 -r5ti2l3a -r5t4ill2 -rtil5le -rt5i2ly -r2t1in -r3tin1a -rt3i4ng -r3titi -rti5tu -r2t3iv -r2t1iz -rt5let -r2tl -rt3li -r1t4o -rto5l -rt5ri2d -rtr2 -rt5s2i -r4t1s -r1tu -r4tus -rtwis4 -rt1w -rt2wi2 -r2u3a2 -r4ub2e -rub3r4 -ru4ce -r4uc -r2ud -rue4l -r4uf2 -ru3in -r2ui2 -ruis5i -ru2l -r4um2e -r4u1mi -ru4m2or2e -ru1mo -run4c2l2 -run1 -run1cu4 -runcul5 -ru4n2d4 -run2e -ru5net -ru4n4g -run4t -r2u2p -rup5lic -rup2l2 -ru3pu -rur4i -ru4r -rus4p -rust5at -r4ust2 -rust5ee -ruste2 -rus5t4ic -rus4t5u -ru3tal -ru3t2i -r1v2 -r4vanc -rva2n -r2ve -rvel4i -r2vel -r3ven -rv4e1n4e -rv5er. -rver1 -rv5ers. -rve2r1s2 -r3vest2 -r3vet -r3vey -rvi4t -r1w -2r2y -ry5er1 -5ry2g1m4 -ry1g -ry4go -ry2m4b -3ryn1go -ryn1 -ry4ng4 -4ryn4gol -ryp5a -r2yp -ry2t -ryth4i -r2z -2sa. -2s1ab -s3a2bl2 -5sack1 -sac4q -s3a2ct -sac4te -sad5i -sa2d5o -5sae -sa4g -3s2ai2 -sain4t -5s2ak -sa2l -sa5l4ac -sal2a -3s2ale -sa3lie2 -s4al4t -sa3lu -s2a4m -sa5min -sam1i -sa1m5o -sam2p4 -san3a -sa2n -san4d2ed -sa4nd -sande2 -s4an4e -san5ga2r -sa4ng -sang2a -san5i3f -san1i -2sant -sant5ri -san1t4r2 -s3ap -sa2p3r2 -sa2r5s2 -sa2r -3sas. -sa4s3s2 -sassem4 -s2a1t -sa2te -s5a2t1iv -s5a4to2r2y -sa2to -sat1or -s2a2t1u -1s2au2 -sau5c2i2 -s4a4uc -sau4r5 -savi2 -s2av -sa3vo4u2 -sa1vo -4s3b -s4bei -sb2e -sbe4s2 -s2by3 -sc2 -s1ca -sca5len -sc1al -sc2ale -s1c2a2p -scar4c2 -s1ca2r -sc2av3 -s1ce -s4c2ed -4scei -4s4ces -s2ch2 -sci2d5 -s1c2i2 -s2c2o2 -scof4 -s4c2oi -3s4cop4e -5scopic -scop1i -5scripti -scr2 -scr2ip -scri2pt2 -2s1cu -4sc4u4ra. -s1cu4r -scur1a -4scuras5 -2s1d2 -2se. -se2a2 -s4e2a2m -seas4 -sea3w -sec4a -sec5a2n -se2c2o2 -secon4 -2s2ed -se4da -sed4it -3se2ed -3sei -se2i3g2 -5sel2a -4s2ele -se3lec -selen5 -5self -2s4e1m2e4 -sem2i -semi5d -se1m4o -se4n5g -3se4n2s -sen5sati -sens2a1t -sen5sor1i -sent5ee -sen1te -5sen4t3m -seo5l2o1g -seo2l -seo1lo -se2p -se1p3a -sep4si -se2ps2 -3se2pt2 -sep3ti -ser4a2n -ser1 -se5r4en4e2 -s2er2e -ser3en -ser4t4o -sert2 -4ser1vo -ser1v2 -s2es -4s4es. -se5s2h -s5esta -sest2 -1set -5s4eum -3sev -sev3e4n -se1wo4 -3sex -sex1o2 -3sey -2s1f -sfac2t5o -s1f2a -sfa2ct2 -sfi4 -sf2or5e -s1fo -sfra2n5 -s1fr2 -2s1g4 -s2h -4sh1ab -sha2 -sh4abi -sh1er1 -she2 -sh5et1 -shil5li -shi4l -shill2 -sh5i1ne4ss -sh1in -sh2in2e -shine4s -sh3io2 -5sh2i2p -s3h2o4n -4shu4 -sh4y2s4 -sh2y -si4all2 -s2i1a2 -siast5 -4s1ib -s3ic1at -3sic1c4 -2s5icl2 -s2i4cu -si5cul -s4i2d -4sid. -si4de -side5l -sid3en -si1d5eri -sider1 -4si4d1s2 -5sid5u4a2 -si1du -si4e2r1s2 -sie2 -si1er1 -s2i1f4 -si2f5f -s2i4g -1sili -sim4p4ly -sim1p -simp2l2 -2sin -s2ine -sin5et -5sing5er1 -s4inge -si4ng -s2in3i -5s4i4n2k -si5nol -s2ino -si3nus -s2in2u -1s2io4 -4sio. -si5o5s -3s2ip -si4pr2 -s1is2 -4sis2h -4si2s1m4 -s4ist3a -sist2 -si4s1t3o -s1it -si4te -sit5om -4s1iv -5s2iva -s1j -s2k2 -4sk. -s5ka2r -ske2 -s3ket -s5key -s3ki1er1 -skie2 -s5ki1es2t2 -sk5i2ly -ski2l -sk5ine4s -sk1in -4s4k1s -s3ky3l -2sl4 -slan2g5i -sl2a -sla2n -sla4ng -s1lat -3sl4au2 -slav5eri -sl2av -slaver1 -s2le -s5lea2 -s3let -s5ley -s3l1it -s1l2o3c -slov5 -s5l4uc -2s1m4 -s3ma2n -smas4 -s3men -sm2e -sm2i3g -3sm2ith -smi2t -smo4d1 -s1mo -smu5ta1t2io2 -s1n2 -s2n1a -2so. 
-2s3od -so2d3o -so2d2y4 -3soe -4s3o2i4d -s2oi -s2ol -sol3a -so5l4a2n -so2l4er1 -so3lic -3solve -solv5er1 -1s2o2m -soma5to -so1ma -so3mat -3so2me. -som2e -so5met1e -so1me2t -so3mo -s2on1a -son5at -s4o2ne -son5or -s2o2p -4sor3ie2 -sor1i -5sor2i3o2 -sor4it -s5or1iz -sor3o -s3o2r2y -sos4 -4sose -s4o5th -s2ot -3so4u2 -sov5e -so3vi -spas1t4 -s1p4as -spens5a -spe4n2s -4speo -3sperm -sper1 -s5pero -spers5a -spe2r1s2 -sph2 -s3pha2 -3spher1 -sphe4 -spic5ul -sp2icu -s2pi2d -sp5id. -s5pi1er1 -sp2ie2 -spil4l2 -sp2il -s2pin -sp3i4ng -sp2i5n1i -spital5 -spit1a -s1p2l2 -sple2 -s4p4ly -s2po -5sp2om -spon5g2i -sp4o2n -spo4ng -3spo4n1s2 -3spoon -sp2oo -spr2u5d -s1pr2 -s4p4y -s1r -sr2e2 -sreg5 -sre1p5u -sre4s -4ss -s1sa -s5s2a4m2 -s1sel -s5se4n5g -s3sent -ssent5er1 -ssen1te -ss3er. -sser1 -s5seri -ss3e2r1s2 -s5seu -s3sev3 -s3s2i1a2 -s1sic -s1s2i1f4 -s2s1in -ss4in. -s4s2ine -ss4is. -ss1is2 -s3s2it -ss4ivi -s4s1iv -ss5li -s2sl4 -s2s3m4 -s4s1n2 -s1so -ssol3u -ss2ol -ss4ol1u4b -s4s2or2e -ssor5ial -ssor1i -ssor2i1a2 -ss5po -s1su -ss3w2 -st2 -4st. -s2t1ab2 -sta3bi -4s1t2ak -s4t2ale -stan2t5iv -sta2n -stant2i -s3tas. -5stat1i2c -s2t3c2 -ste2 -ste5a2r -stea2 -ste5at -s4te1b -s4tec -4s1t4ed -s4te2dl4 -s4te4d1n2 -4s2t2er2e4 -ster1 -ster4i1a2 -s1teri -s4tern. -ster2n2 -s3tero -st5es4t2 -s2t2es -s1th -s4tha4 -s4thu -s3t2i3a2 -3st2ick1 -st1ic -s3t2ic1u -stil5ler1 -st4ill2 -s4ti2ly -st3i4ng -st1in -5s4t2ir -s5t1iz -4s2tl -st3ler1 -st3li -s4toe -3ston -sto2n4e3 -ston4ie2 -ston1i -s5torat -st1or -stor1a -st4or5ia2n -stor1i -stor2i1a2 -s4tose -s2to4u2 -s4tr4ay -str2 -str2e4 -strep3 -3st4r4uc -str2u5d -2s4t3s -s1tu -s4tud -stu4m -stur4e -stu4r -4st1w -s4ty -1styl -4su. -su5a2n -s2ua2 -su4b1 -su2b1t2 -su2ct4 -s4uc -sud4a -su3e4t -su2f3f -suf2 -sug3 -3s2ui2 -su2i5c -su5i4ng -1s2ul -s4u2m -su1m3i -su4n4a4 -sun1 -su5pe -s2up -su3pin -supra3 -su2pr2 -sur4as5 -su4r -sur1a -sur3c2 -s4ur1g2 -sur3p2l2 -su5su -su5z -2s3v -svers5a -sver1 -sve2r1s2 -sves4 -sve2st5i -svest2 -sw2 -5swee -swel4l5i -swell2 -4sw4e2r4ed -swer1 -sw2er2e -2s1wo -s2y -4sy. -sy4b2i -s4y1b -sy1c -sy4ce -sy4c2hr -sy2ch -sy4d4 -1syl -3syn1 -syn5e -sy5pho -s2yp -sy2ph -syr5i -2ta. -2t1ab -ta5blem -ta2bl2 -3tabli4 -t2abo2 -ta3bol -ta4bo4u2 -t4a3ce -ta5ch2om -ta2ch -ta1cho -ta3ch2y -ta4ci2d -t2a1c2i2 -t5ade -tad4i -5t2ad1j -ta5d2or -ta2do -tad2r2 -tae5n -taf4 -tage5o -ta5g2o1g -3ta2gr2 -3t2ah -1t2ai2 -3tail -2ta2ir -t4ais -1t2ak -tal2c2 -tal5ent -t2ale -ta5lep -t4al2i1a2 -t4al1in -tal4l3a -tall2 -5tal1lu -t2alo4 -t2a3ly4 -tam5ar1i -t2a2m -ta1ma2r -5ta3me2t -tam2e -tamor2ph5 -ta1mo -ta3morp -tan5at -ta2n -tan2a -tand5er1 -ta4nd -tande2 -t4ane -5tanel -tan5ie2 -tan1i -t5an1iz -ta2nt5a2n -tan2t2a -t4a4pa -t2ap -1tard -ta2r -tar5ia. -tar1i -tar2i1a2 -tark5i -tar2k -tar3n2 -3t2a4r1r4 -tas3i -t3a2s1m4 -5ta4ss2 -tas4t2 -t2a3sta -tast5i4c -t4ateu -3ta2t1is -t4a2to. -ta2to -tat4o4u2 -t2a2t4r2 -ta1t3ut -t2a2tu -tau3t2o -t2au2 -t5awa -tawn4 -t4a4x -4t3b -2tc2 -t1ca -tcas4 -tch5e2t2t4 -t2ch -tche2 -tchet1 -tch5u -4t1d4 -4te. 
-te5cha2 -te2ch -5tec2h1n2 -te3cr2 -t4ed -te5d2a -4ted1d4 -4te1do -4tee1i -te2g -5tegic -te1g2i -t3ego -te2g1r2 -teg3u -tei4 -te2l -4t4e4l2ed -t2ele -tel5iz -1tell2 -4te3lo -3te4ls -tem3a -4te1m2e4 -t4e5m4on -te1mo -ten4ag -ten1a -4te2n3a2r -4t4ene -t5en3m -5tenn1a -ten1n4 -4ten1o -te5n2o1g -tent4a -te2o -teo5l -2tep -te3pe -tep5i -tera4c -ter1 -t4era4g -t4era2to -3ter3b -5t2erd -2t2er2e4 -ter3e1b -ter5ec -5terel -te3reo -3tere4s4 -1teri -ter3i1a2 -ter5i2d -ter5i1f -t4er3in -ter5iorit -ter2i3o2 -teri5or -terior1i -t4er3i2t -ter5k4 -5tern3it -ter2n2 -tern1i -ter5no -3te4r1r4 -2t2es -4t4es. -tesi4 -t3esq -t3ess. -te4ss -t5ess2es -tes4t2 -test3a -5teste2 -test5er1 -test5in -te2sti -test5or -tes5tu -teti4 -tet1r2 -tet1r5o -tew3a2r -te1wa -3tex -2t3f -t3g -2th. -tha4 -th5al. -thal3m2 -4t4he. -the2 -4th2ea2 -th5eas -4th2ed -1th4ei -3theo -theo3l -t4her1 -5ther2ap -th5er1c2 -t5h2erd -4th4e2r4ed -th2er2e -th3er2n2 -th3e2r2y -4t2hi. -t5hill2 -thi4l -3th4i4n2k -th1in -5th4io2 -th4is. -th5lo -t4hl -2t2h1m2 -th4mi -th3oli -4t5h2oo -4th1o2p1t2 -4thores -th2or2e -3th2ot -5thoug4 -th2o4u2 -1t2h2r -2t2h1s -5thu4r -5thy2m -th2y -3thyr4 -th4y2s4 -4ti. -1t2i2a2 -ti3ab -2t3ib -5t4i5bu -t1ic -t3ic. -tic5as -t2i1c2i2 -tici5a2r -tic2i1a2 -3t4i3cin -t4icity -ti3col -t2i2c2o2 -t2ic1u -4ticule -t3id. -ti2d -t4id1a -3tidi -ti3die2 -t5i4d1s2 -3t2i2en -tie2 -1t2i1f2 -ti3fe -4ti2ff -4ti4f3ic. -3t4i1g2i -t2ig -ti3g2i5o2 -4ti2g1m4 -5tigu -ti4ka -t2ik2 -ti4let -5til1in -t4ill2 -til4l5ag -til1l2a -t4ilt -1tim -tim1a -5ti1me2t4 -tim2e -t1in -5ti5nad -tin1a -4t4i2n2ed -tin3et -ti4ng5i4ng -t4in2g2i -ti4ng -3t2in1n4 -4ti4n1s2 -t4int -tin4te -tin5t4ed -tint5er1 -tin3ue -t2in2u -1t2io2 -ti3oc -tiol3a -t4i1ol -ti5o3mo -ti2om -4tionem -tio2ne -1t2ip -ti5plex -tip2l2 -ti3pli -t2i4q -ti5q2ua2 -tiqu4 -t3iris -t2ir -tir1i -2t1is -3tis1a2n -ti4sa -ti2s4c2 -tish5i -tis2h -3t2i4s1s -tis2t2 -5t4iste2 -t4is1tr2 -ti5t4a2n -tit1a -tith4e2 -t2ith -tit5il -t3i2t1is -3ti2tl -ti3tra -t2itr2 -3t2i1u2 -2t1iv -tiv5all2 -t2iva -t3ive -tiv3is -2tl -t1l2a -tlant4 -tla2n -5tle1b -5tle5dr2 -t2l2ed -3tle1f -3tlem -5tlen -5tletr2 -t1let -5tlew -t1li -tlin4 -4t3m -t1me2t2 -tm2e -tm2o4t5 -t1mo -2t3n2 -t4n2er2e -tn1er1 -2to. -toas4 -t2oa2 -to1b -4toc1c4 -t2oc -to3de5c -tod4i -to5do -3toe -1t2o1g -2t3o2i4d -t2oi -5tok -4to2l2ed -tol4l2 -tolu5 -to5ly -to2m3ac -t2om -to1ma -to1ma4n -tom2at5ol -to3mat -toma2to -to2m4b -to4m2o1g -to1mo -tom5os -to2n4e -t4on5ea2 -3ton1n4 -to4n3s2 -top4e -to5p2i1a2 -top1i -to4p4os -to1po -t1or -to5r2ad -tor1a -4t2or2e -tor5er1 -tori4as -tor1i -tor2i1a2 -to4r5oi -tor5p -tor4q -3tos. -t4o3s4p -tos4t2 -to5str2 -to5tal1is -t2ot -to5tal1iz -to3tem -tot5u -tou4f2 -to4u2 -5tou4r -t3ous -4tov -to3wa2r -t3p -tr2 -tra4c2o2 -4trad1d4 -tr2ad -4tra5ist2 -tr2ai2 -tra5q -trar2ch4 -t4rar1c2 -tra2r -tra5ven -tr2av -tra5ve2r1s2 -traver1 -trav5est2 -3tr4ay -4t4re. -tr2e -4t2r4ed -tre4mo -tren4 -tr4end5i -tr2e4nd -tre5pr2 -tre4s4s -4trew -t5r4icl2 -3t2r2icu -t2rie2 -tri5fli -tr2i1f -tr4i2fl2 -t5rifu5g4a -tri3fu -2tril -tri3li -tri3m2e -t2rit -4tr2ix -t4r2od -tro5f -5troo2p -tr2oo4 -tro4pha2 -tr2op -tr4o3sp -t2r2ot -t5ro1t2o -tro1v -3tr2oy -t4r4uc -tr2u3i2 -2t4r2y -tr4y2s4 -4t1s -t2sc2 -ts4h -ts2i -t4sil -tst4ay4 -tst2 -2t1t4 -tta4 -t3t1ab -t5ta2n -t5tas -t3t4ed -t4t2er2e4 -tter1 -t5ter3er1 -t5tes4t2 -t2t2es -t3ti -tti3tu -ttitud4 -ttitu5di -t3t2ler1 -t2tl -t3t1li -t5t2oi -t5t1or -t3tos -t4t5s -t4tu1pe -tt2up -t2ty -4tu. -t2u1a2 -tu4al5li -tu1al -tuall2 -tuar3i4 -tua2r -tu4bin -tu1b2i -tu5bu -tu5den -tud4e -tud5ie2 -tu5en -4tuf2 -t2u1i2 -tu4is -2tum. 
-3tu1mi -4tu4m1s -3tun1 -tu4n4a4 -tu4ne -tun5it -tun2i -tup5let -t2up -tup2l2 -tup5lic -tu5rac -tu4r -tur1a -t4ura2n -tur2b3a -tur1b -tur4d -turf5i -tur2f -5turit -tur1i -tur4n2 -5tur5o -1tut -4tu2t4iv -tut2i -t1w -t3wa4 -t2wi2 -twi5li -t3wit -t3wo -twon4 -4ty. -ty4a2 -5ty2ch -ty4let -tyle2 -tyl5i -ty5mi -1t2yp -3type -1tyr1 -2tz2 -t5z2i1a2 -tz1i -t5z2ie2 -2ua2 -u2a3c2i2 -u2ag -u2a5h -u1al -ua5lu -uan4o -ua2n -uan2t5is -uant2i -uant5it -uar3a -ua2r -uar2d -uar3i -uari4n -uar5te2r1s2 -uart2 -uarter1 -uar4t5i -ua5ter2n2 -u4ater1 -uba4 -ub5b2ly -u2b1b2 -ub2bl2 -u1b2i -u4b2icu -ub3lin -u2bl2 -ub5lo -ub3ra -ubr4 -4uc -u1c2a -uc1cen5 -uc1c4 -u4c2e4nd -u1cen2 -u4ch -u5c2hr -uc3l2 -u4c2om -u1c2o2 -uc2o5t -uc2tr2 -u2ct -uc3ub -uc5ul -u5cum -u5d2ac -ud1al -ud4e -ud5e1p -u4der1 -udev4 -ud4g -ud4i4cin -udi1c2i2 -ud3i2ed -udie2 -u5dinis -u2d1in -ud2in1i -u3d2i3o2 -u5di1t2io2 -u2do -u5d2oi -ud5on -u5d2or -ue1b4 -u4ed -uen4o -uen4ter1 -uen1te -uer3a -uer1 -ue4s4s -uest5rat -uest4r2 -uest2 -ues5tri -ue4t -uf2 -3u1f2a -u3fl2 -u4fo -u2ft4 -uga4c -ug5l2i1f -u2gl2 -ug2n1i -u2gn2 -u4go -ug3ul -ug3ur1a -ugu4r -uhem3 -uhe2 -2ui2 -ui3al -u2i1a2 -u2ic -ui3cent5 -u4ice -ui1cen2 -ui1d5o -ui2d -ui2l4a -uild5er1 -ui3lib -uil4t -uin1c5u -u2inc -ui4n4s2 -uint4 -uin4ta -ui5pr2 -u2ip -uis3er1 -uis4t2 -uisti4 -uit5er1 -ui5val5 -u2iva -ui3vo -u2iz -4ul. -u1l2a -u4l1ab -4ul4ac -ul5ard -ula2r -u5lat -ul4bo2 -ul3b -ul3ca -ul1c2 -ul4ch -5ulch4e2 -5ulchr2e4 -ul3c2hr -4ulea2 -u5lee -u1len4 -4ulen1c2i2 -u5lent -u1let4 -u2l4ev -ul2f2a -ul2i -ul4i1a2 -u3l4ine -ul3i4ng -ul5is2h -ul1is -u5liti -ul1it -u5lity -4ull2 -ul4lat -ul1l2a -ul4l5ib -ul1li -ul4l1is -ul4l1it -ul3m2 -u1lo -u5l2om -ulph3i -ulph2 -ul2ph3o -ulp5i4ng -ul4po -2u4ls -ul3s2i1f4 -u1lu -ul1v4 -u1ma -u2m3a2m -u1ma2r4 -u5mas -um4bar. -u2m1b -umba2r -um2b2i -umen4t -um2e -u1men -u1mi -u4m1ic -u2m5i1f -umi4fy -umi5l2i1a2 -um2il -umin4a2r -umin1a -u4m4i2n2ed -u4m3i4ng -u4mor1a -u1mo -u4mos -um2p -um4pa -ump3er1 -ump5li -ump2l2 -um2pt4 -ump5te -u1mu -umu4lo -un1 -u4n3a4 -un5ab -unabu4 -un4ae -un4as. -un2as -un2ce -un4dal -u4nd -un2da -un3d2ed -unde2 -un1de4t -undeter5m -undeter2 -un1di4c -un4die2 -un3do -un4dus -un1du -u3n2er1 -unho5li -un1h2 -unho4 -un2i -u1nic -un4ie2 -un3in -un4ine -un2i5p -uni3s4o -un3ist2 -un2i1v -un3iz -unk5eri -u4n2k -unker1 -un5ket -un3kn2 -2un1n4 -un4nag -unn1a -un5o -un5r -u4n3s4 -un5s2h2 -un2t2i -until4 -un2u4 -un3us -uo3de -uo3dent4 -u5oros -u3os -uo5t4a1t2io2 -u2ot -u1o4u2 -2up -u1pat -u1pe -u5p4ee -uper3 -u1ph -u5pi2d -up3i4ng -u4po -u5pol -u2pr2 -upr2e4 -u5que4t -uqu4 -u4r -ur1a -4u4ra. -ur2a4c2i2 -4urae -ura2g -4urant2i -uran2t -ura2n -uras5 -urb5i4ng -ur1b -urb2i -ur2bin -ur2c2 -urc3a -ur5den. -ur5den1i -ur5die2 -ur4du -ur3ea4 -ur2e -ur5ee -ur1er1 -ur3e2r1s2 -ur1e2t -ur3e2t2t4 -ur2f -ur3f2a -ur1i -u5r2i5cu -ur4ie. -urie2 -ur5ifie2 -ur2i1f -uril4 -ur4ili -ur5ion -ur2i3o2 -uri4os. 
-ur2l5er1 -ur1le -ur5lie2 -url5i4ng -ur1m4 -urn3al -ur2n2 -urn1a -urn3er1 -ur4n5s -ur1o -ur2o4d -ur5o4m -ur5ot -uroti4 -ur3pen5t -urpe2 -ur2ph4 -u2r2s2 -ur2s5a2l -urs5er1 -ur2se -ur3s2h -urs3or -ur5ta -urt2 -ur1te -ur5t2es -urth2 -ur3the2 -urti4 -ur1u -ur4va -ur1v2 -u3sad -us3a4g -us3a2l -us4ap -us3a1t -2usc2 -us4ca2n -us1ca -ush5a2 -us2h -us5ia2n -us2i1a2 -usil5 -u4s1in -usk5er1 -us2k2 -uske2 -us1p -us4pa -uss4e -u4ss -4ust2 -us3tac -us5ta2n -ust4ic -us5t2i1c2i2 -ust5ig -ust3il -us1to4 -us1tr2 -us4tr2e4 -usur4e -usu4r -us5ur1i -u3t4ane -uta2n -utch4e2 -u2tc2 -ut2ch -ut5en1i -u5te2o -u4t2er2e4 -uter1 -ut2i -u3tie2 -ut3i4ng -ut1in -u5t2in1i -u3t2io2 -ut5i2s1m4 -u2t1is -ut3is2t2 -5u5t1iz -ut3le -u2tl -ut1li4 -ut2o -u4to5s -u4t1ra -utr2 -u4t1s2 -ut5s1m4 -ut4to2n4e -u2t1t4 -u3tu -u4tul -uu4 -uv2 -u4va -uve2 -uven3 -uv5eri -uver1 -u5v1in -ux2o -uy4a2 -uy5er1 -4va. -2v3ab -5vac -va1c2a -va5ceo -vacu1 -v4ad -3vag3a -va4ge -4va2g2e4d -vager4 -va2g5r2 -v1al. -1v2ale -vali2 -va5lie2 -val4ise -val1is -5valu -5val4v -vam4i -v2a2m -va5mo -5van1n4 -va2n -van2t2a4 -4van2tl -var4is -va2r -var1i -4vas2e -vas5el5 -v5a4so -v2ast3a -vast2 -v4at. -5vatee -v4at4in1a -va2t3in -4v2a2tu -2ve. -ve2ct4 -ve3g -3vei -2vel -vel3at -vel2a -4v2ele -v3e2l1er1 -ve5l4ine -v1ell2 -v4el1l2a -vel5ler1 -vel3li -vel5op1i -ve4n4al -ven1a -ven4do -v2e4nd -v4e1ne -ve5n2i1a2 -ven1i -ven2t5o -ven4tr2 -4v4en2u -v5en5ue -5ve3o -5ver1b -ver1 -verde5v -v2erd -4v4er2e4 -ver5ea4 -ver3ei4 -v5er3ie2 -ver3m4 -ver4ne -ver2n2 -5ver2se -ve2r1s2 -4v4es. -4vi. -5vi3al1it -v2i1a2 -vi4a2tr2 -vi3at -vi1b4 -vic2 -vi4ca -vi5car1i -v2i1ca2r -vice3r1 -v4ice -5vi2ct2 -5v2icu -5vider1 -vi2d -vi2gn3 -v2ig -vi4l -vil3i -3vil2i4a2 -v5ilise -vil1is -v5ilize -vil1iz -vil5lin -vill2 -vil1li -vim4 -5vim2e -2v1in -vin4a2c -vin1a -3vin1c2i2 -v2inc -vin2e -5vinit -v2in1i -v5in1iz -vint4 -vin5ta -3v2i1o2 -v4i1ol3 -vi5om -5v2i3p -vir2e4 -v2ir -vi5r2i4d -vir1i -vir3u -5visecti -v4i1sec -vise2ct -5vi1s2io4 -v3i2s1m4 -2v5ist2 -vi2t -vit2a -vi3tal -vi5te2l -v5it1ie2 -v2it1r2 -vi3tu -v3ity -viv5al -v2iva -viv5or -vi2vo -v2i5zo -v1iz -1vo -2vo. -vo2l -vo5li1t2io2 -vol2it -vol4u1b2i -v4ol1ub -volv4 -4von -vo5rac -vor1a -3vor1c2 -4v2or2e -3voro -vo3ta2r -v2ot -2vow -vr4 -v5ra4 -v5ri -v5ro -vr2ot4 -4vs -v3ur2e -vu4r -2vv2 -v5ver1 -v5vi -4vy -4w1ab -wag3o -wais4 -w2ai2 -w3al. -wal2l5er1 -wall2 -w3a4ls -wan5gli -wa2n -wa4ng -wan2gl2 -wank5er1 -wa4n2k -war5d2ed -wa2r -ward5er1 -ward5r2 -war4f -war4te -wart2 -war5th2i -war2th -wa4ss4 -was4t2 -wa1te -wav4in2e -w2av -wa2v1in -w1b4 -w4bon -wbo2 -w5c -w5die2 -w3dr2 -we4b -w4ed -3we2ed -5wei -weight5i -we2ig2 -wei2gh -weigh2t -we2ir4 -wel3i -wel1iz4 -wel4iz3i2 -wel4li -well2 -went4 -wes4 -west3 -w5e4st. -w5f -wh2 -w5hi2d -wi2 -wid4e -wi2d -wi5er1 -wie2 -wil2l5in -will2 -wil1li -wim2p -win2e -wing5er1 -w4inge -wi4ng -win4tr2 -3w4ise -with5eri -w2ith -wit4her1 -withe2 -w3l2a -w2l1er1 -wl1i -wl4ie2 -w1m -1wo -wol4 -wol5ver1 -3w2om -w2on2t -word5i -wot2ch4 -w2ot -wo2tc2 -w2oun4 -wo4u2 -wp5in -wra4 -ws5i4ng -w2sin -w5ster1 -wst2 -wste2 -wt4 -w5te -w3to -wy2 -wz4 -x1a -x4a2ch -x4ade -x2ag -x3a2g1g -xa5me2t -x2a2m -xam2e -x3am1i -xa4n5d -xa2n -xan1o4 -x2as -xas5p -x3c4 -xc2av3 -xcor5 -x1c2o2 -xe4 -x1ec -xec3r2 -xe5cu3t2io2 -xecut2i -xecut5o -x2e2d -x5e2dl4 -x5e4d1n2 -x5eg -x1em -x3en -xen4op -xen1o -x3er1 -xer4g2 -xer3o -x1h -xhort4a -xh2ort2 -x1i -x3ia. -x2i1a2 -x4ias -xi4c -x5i1ge -x2ig -xim3a -x4im2e -xi1me2t4 -x3io2 -x2i4p -x4it. 
-x4i4t1s -x1o -x4ode -x5om -xo4mat -xo1ma -xo4n -x4os -xotr2op4 -x2ot -x4o1tr2 -x3p -xpel4 -xp4o5n2 -x1po -xp2oun4 -xpo4u2 -x1s2 -x1t2 -x4t4ed -xtens5o -xte4n2s -x1ter3i -xter1 -xter4m3 -xter2n3 -x4th -xti4 -xtr2a5d -xtr2 -xtr2a3v -xtr2e4 -xu4o -x1u4r -xur4b -x5us -x5w -xx4 -x4y2s4 -xy3t -y1a2 -y5ac -1y2a2r -3yard -yas4i -4y1b -yb2i -yc2a5m -y5chede2 -y2ch -y4ch2ed -yche2 -ych5is -y3cho -y4chose -yc1l2 -ycl2a2m4 -ycl2a -y4coli -y1c2o2 -y4coll2 -yc2om4 -y2cos -y1d4 -yda4 -yder4 -ydro5s -ydr2 -y4dro4u2 -y3ee -yel5o -y3en -y1er1 -y3e4st. -yest2 -yes5te2 -y5e2t2t4 -y5f -y1g -y1g2i2 -yg2i5a2 -y3gl2 -yg2o4i -y1h -y1i -y3in -yle2 -ylin5de2 -yl2i4n4d -yllab5i -yll2 -yl1l2a -yll1ab -yl3os -yl5o4u2 -y1m2e4 -y3men -y5me2t -y5m2i1a2 -ym5in -ym2ot4 -y1mo -ym4pha2 -ym1p -yn1 -yn5ago4 -yn1a -yna4nd5 -yna2n -yn5ap4 -yn5ast2 -yn2as -yn4c2i2 -y4nd4 -yn2e -yn3er1 -y4ng4 -yn4gol -yn1go -yni4c -yn1i -y2n4y2 -y1o2 -yo3d -yo4g4i4s -y2o1g -yo3g2i -y2oun4 -yo4u2 -you4ng5 -2yp -yp5al -yper3 -y5p2er2e -y4p4eri -y4pero -y4pet -y2ph -yph4e4 -yph3i -y4p1i -y2p1n -y1po1 -y4po4x -y2pr2 -yp5ri -yp4si -y2ps2 -yp5sy5f -yps2y -ypt3a -y2pt2 -y5pu -y3rag -yr3at -yr3ic -y5r2ig -yr3is -yr3i4t -yr5o1lo -y4r4r4 -y2r4s2 -yr5u -4y2s -ys5a4g -ys5a1t -y3s2c2 -y3s2h -ys1ic -y2s3in -y1s2i4o4 -yso5 -ys4so -y4ss -ys1t2 -ys4to -y3u -yv4 -y3w -yz5er1 -yzy4 -z1a1 -2za. -za4bi -z1ab -z2a2i2 -z4as -za4te -zd4 -ze1b4 -z2e4d -zen4a -z5e4ng -zer5a -zer1 -z3et4 -z1i -zib5 -5zic4 -z2ie2 -zi5m -zin4c3i2 -z2inc -z3i4ng -z4in2g5i -z4is -3zlem -z3ler1 -z3li -4zo. -5z2oa2 -zo3a2n -3z2oo2 -zo3ol -zo3on -zo5o2p -zo5oti -zo2o2t -zo5p -z2ot2 -z5s -5zum -4zy. -zz2 -z3za2r -zz1a1 -z5z4as -z3z2ie2 -zz1i -zzo3 -z5z2ot2 diff --git a/dist-packages/wordaxe/wordaxe/dict/hyph_en_GB.dic b/dist-packages/wordaxe/wordaxe/dict/hyph_en_GB.dic deleted file mode 100755 index c2c5f8e2f..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/hyph_en_GB.dic +++ /dev/null @@ -1,11388 +0,0 @@ -ISO8859-1 -.ab4i -.1ab -.ab3ol -.abo2 -.ace4 -.ace2t3 -.a2ch4 -.ac5t2iva -.a2ct -.ac2t1iv -.ad4d1in -.ad1d4 -.ad3di -.ad3e -.a2d3o -.4a2e5d -.aer3i -.aer1 -.a2f3f -.a2f3t -.ag4a -.4a2g5n2 -.a2ir3 -.2ai2 -.al5im -.4al1k -.al3le -.all2 -.a1m5a2r -.2a2m -.ama5te -.am1at -.am2i -.am3pe -.am2p -.am3ph -.a2n1 -.an1a3b -.an2a -.an2a3s -.a4nd2 -.an5da -.an4el -.a4n4en -.an4gl2 -.a4ng -.an4on. 
-.an1o -.a4n3s -.an2t3a -.an3t2i3 -.4ant4ic -.an4t5o -.a2n2y5 -.a3ph5or -.2ap -.a1pho -.ap4i -.ar5ab -.a2r -.ar5ap -.ar4c2i2 -.ar1c2 -.ar5d -.ar4e -.ar1i4 -.ar4ise -.ar4isi -.ar5sen -.a2r2s2 -.ar2se -.ar4t5icl2 -.art2 -.ar1t4ic -.as1 -.as4q -.as5s1ib -.a4ss2 -.at5a2r -.2a2ta -.ateli4 -.ate2l -.at5omi4se -.a2to -.at2om -.atom1i -.at5om1iz -.2a2t3r2 -.a2t3t4 -.au3b -.2au2 -.au3g4u -.aur4e5 -.au4r -.aus5 -.authen5 -.au3th -.authe2 -.2av4 -.av5era -.aver1 -.bap5ti2s1m4 -.b2a4p1 -.ba2pt2 -.bap2t1is -.barri5c4 -.ba2r -.b2a4r1r4 -.bar3ri -.bas4i -.1bas -.ba5sic -.be3d2i -.b2e -.2b2ed -.be3lo -.1bel -.be5r4a -.ber1 -.be5s1m4 -.be1s2 -.bi4er1 -.b4ie2 -.blaz5o -.2bl2 -.bl2a -.b4l2az2 -.bo3lo -.bo2 -.bo1s5o2m -.bou4n4d -.bo4u2 -.b2oun1 -.bov4 -.3bra5ch -.br4 -.br2e2 -.burn5i -.bu4r -.bur2n2 -.ca3de -.ca4gin -.ca1g2i -.cam5i -.c2a2m -.ca1m3o -.ca2n1 -.can5t2a -.ca5p2itu -.1c2ap -.cap1i -.car4i -.1ca2r -.cas5u1al -.3cas1u3 -.cas2ua2 -.ca4ti -.c1at -.cen5so -.1cen2 -.ce4n2s -.cen5ten1a -.3cent -.cen1te -.cen4t5ri -.cen1tr2 -.cer4i -.cer1 -.2ch4 -.cit4a -.1c2i2 -.cle1m5e4 -.cl2 -.c2le2 -.clima5to -.cli1m -.co5i4t -.1c2o2 -.c2oi -.co3pa -.cop5ro -.co1pr2 -.c4o3r1u -.co3si -.co5ter1 -.c2ot -.coty3le5 -.5coty -.cri5t2i1c2i2 -.cr2 -.crit1ic -.cust2om5 -.1cus -.c4ust2 -.cus1to4 -.3d2av5 -.dea5c2o2 -.dea2 -.de5lec -.d2ele -.del5eg -.de3li -.de3l2i5r -.1d4e1m -.de5nit -.den1i -.de3n1o -.der2 -.de3ra -.de5re4s -.d4er2e -.1de3ri -.de5sc2r2ib -.5de2sc2 -.descr2 -.de5ser1v2 -.deser1 -.de5signe -.des4i -.des2i4g -.desi2gn2 -.de5s2ir -.de5s1is2 -.de5sp2oi -.des1p -.des2po -.determ5i -.1de1t -.deter2 -.de3ve -.de4w -.di4al. -.1d2i1a2 -.dia3s -.di4at -.din4a -.2d1in -.di2o5c -.3d2i1o2 -.1do2 -.do4e -.domest5 -.d2om -.dom2e -.do2me2s -.du4al. -.1du -.d2ua2 -.du1al -.3d4u4c -.d4y2s3 -.2d2y -.eas4t5 -.ea2 -.ech1in5 -.ech3i -.e2ch -.e1c2o3 -.e2c3t -.e1d5em -.2ed -.ede2 -.ed4it. 
-.ed1it -.ed4iti -.eg4 -.ei3d4 -.e2i5r -.e2l3ev3 -.2ele -.el2i -.elu5s4 -.e1lu -.e2m3b -.em5in -.em1p4 -.em5p4y -.en1 -.en5c -.en4d2ed -.2e4nd -.ende2 -.e4n3s -.ent2 -.en5ta -.eo1s5 -.epi1 -.epi3d -.er2a -.er1 -.er5em5 -.2er2e -.er4i4 -.er4o2 -.eros4 -.er2ot3 -.er4ri -.e4r1r4 -.es1 -.esc1al5 -.e2sc2 -.es1ca -.es3p -.es3t2 -.eter2n5 -.eter2 -.eth3e2 -.e2th1 -.eu1 -.eu4r4 -.eval3 -.evol5ut -.e1vo -.evo2l -.ew4 -.ex1 -.ex3a -.eye3 -.fal4le -.1f2a -.fall2 -.far4i -.5fa2r -.fec5un2da -.3fec -.fecun1 -.fecu4nd -.f2e4n4d -.feo2ff5 -.feof2 -.fi2 -.fi5l2i1a2 -.1fi2l -.fil5tr2 -.fi1n5e4ss -.2fin -.fin2e -.fine4s -.f1i4n3g -.fi5n4it -.f2in1i -.fi2s4c5 -.3f2o3c -.1fo -.fran5ch -.1fr2 -.fra2n -.fu5g4a -.3fu -.g2a4m -.ga1m5e2t -.gam2e -.gen4et -.3gen -.g4ene -.ge5neti -.gen5i1a2 -.gen1i -.ge3ro -.ger1 -.glor5i3o2 -.2gl2 -.3glo -.glor1i -.gnost4 -.2gn2 -.g2no -.gno4s -.go3no -.3gos3 -.h1ab2 -.ha2 -.ha5bili -.hab1il -.hama5 -.h2a4m -.han4de2 -.ha2n -.ha4nd -.hast5i -.hast2 -.h4e4i -.he2 -.hem5a -.hi2 -.hi3b -.ho2l -.ho5rol -.hov3 -.hy3lo -.h2y -.hy2l -.ico3s -.2i2c2o2 -.idi2 -.i2d -.2ig3 -.i2g1n2 -.il4i -.i2m5b -.in1 -.2i4n3d -.in3e2 -.2in2i -.2in3o -.in3t -.inve2st5i -.in3v2 -.inve2 -.invest2 -.i4r3r4 -.2ir -.i2s4c2 -.is4li -.i2s1l4 -.is4o -.i1s2o5m -.ka5ro -.ka2r -.ki4e2 -.kin3e -.k1in -.lab4o2 -.l2a -.l1ab -.la4m2e -.l2a2m -.lam5enta -.la3men -.lan5i -.la2n -.lash4e2 -.l4as -.las2h -.le4m -.len5t2i -.le2p -.le1p5r2 -.les5son -.3le4s4s -.les2so -.le5va2n -.2lev -.l3eva -.libra2r5 -.lib1r4 -.lig3a -.1l2ig -.l2i3o2 -.li4o4n1s2 -.l2i4p -.loc3a -.1l2oc -.lo4g2i1a2 -.1l2o1g -.lo3g2i -.lo2p -.loph3 -.lous5i -.lo4u2 -.lov5er1 -.lub3 -.ly1o3 -.2ly -.mac5u -.mal5ad5 -.mal2a -.ma5l1in -.mar5ti -.1ma2r -.mart2 -.m4a2th5 -.me5lo3d2i1o2 -.m2e -.melo4di -.ment4 -.1men -.men5ta -.me5r2i2d -.mer1 -.me5r3in -.met4er2 -.1me2t -.met1e -.mi4e2 -.mi3gr2 -.m2ig -.min5ue -.m2in2u -.mir2k4 -.m2ir -.mis1 -.mi5to -.mi2t -.mo3b2i -.1mo -.mo5le2c4 -.mon3a4 -.m4on -.mor5ti -.m2ort2 -.mu3n2i -.mun1 -.mu3si -.mus2i5c2o2 -.myth3 -.2my -.3myt -.n2a5k2 -.n1a -.nar1i4 -.na2r -.nast4 -.n2as -.nas5ti -.ne2c3t -.ni4c -.n1i -.ni5tro -.n2itr2 -.n2o4c -.no2m3o -.n2om -.nos3t2 -.no5t1ic -.n2ot -.nuc2le5 -.n2u -.5n4uc -.nuc3l2 -.o2b2ed5 -.ob2e -.o1b3e4l -.o2b3l2 -.od4 -.o2e4d5 -.oe5so -.o2f5t -.2oi4 -.ol4d -.om2e2 -.2om -.om5el -.on4ce -.on1c -.o2n4e -.op2i -.op2t5a -.1o2p1t2 -.or1 -.or4at4 -.or1a -.ora5tor1i -.orat1or5 -.ora2to -.or5che2 -.or4ch -.or1c2 -.or3d -.2or2e4 -.or3eo -.or4i -.orn1er4 -.or2n2 -.or2o -.os1 -.osi4 -.4oth5 -.2ot -.out1 -.o4u2 -.ov4 -.pal5i -.para5di4s1 -.1pa2r -.par2ad -.par5af -.par1a5t -.p2a5ta -.pa4t2io2 -.pe2c3t4 -.pecu3 -.3ped3e2 -.2p2ed -.p2e4nd4 -.pen5de2 -.pe2p3t2 -.per3i5n -.p4eri -.per1 -.per3se5c -.pe2r1s2 -.per2se -.pe5titi -.3pet -.pe2ti -.ph2 -.phe5n2o2m -.phe4 -.phen1o -.phon4i -.1pho -.ph2o4n -.p2i2e2 -.pi3l3a -.p2il -.plast4 -.p2l2 -.1pl2a -.pl4as -.plic4 -.plica4 -.plos4 -.po3l2a -.1po -.po5lite -.pol2it -.po2p -.p4op5l2 -.po5si1t2io2 -.3p4os -.pos1it -.pos5si -.po4s1s2 -.pro5bat -.1pr2 -.pr4oba -.pu4r4r4 -.pu4r -.put4te -.pu2t1t4 -.ra5cem -.ran5g2i -.ra2n -.ra4ng -.re3c2a -.r2e -.r2ef5er2e -.re2fe -.re1f -.refer1 -.re5ga2r -.re1i4 -.re5lin -.re1m -.re5o -.res5c2i2 -.re2sc2 -.re5sen -.re5s2po -.re5stat -.rest2 -.r2e5s4t2or2e -.rest1or -.re5st4r2 -.re3t2a -.re5u -.re3w -.rib5a -.2r2ib -.rin4 -.rit2 -.rol4l2a -.roll2 -.r4os3a -.sa2 -.sac5r2 -.sal4i -.sa2l -.sa5l1in -.salt5er1 -.s4al4t -.sal1te -.sanc5 -.sa2n -.s4ap5a -.s3ap -.sa3vo -.s2av -.sci3e2 -.sc2 -.s1c2i2 -.sea3s4 -.se2a2 -.se2ct4 -.sec5to -.se3gr2 
-.sen3t -.se1q -.ser4ie2 -.ser1 -.s2es1 -.sev5era -.3sev -.sever1 -.s2h2 -.si5g2no -.s2i4g -.si2gn2 -.s1is3 -.st4 -.sta2t4o -.stra5to -.str2 -.str4in2g5i -.stri4ng -.su5d4a -.sulph5a2 -.1s2ul -.sulph2 -.sul3t -.tact4i -.ta2ct -.tac5t2ic -.t2a4m -.ta1ma2r5 -.tar5o -.ta2r -.te2ct4 -.tel5a -.te2l -.tell5e -.1tell2 -.te4m -.te5ra5t -.ter1 -.ter4p -.th4 -.tho4 -.thol4 -.ti2 -.til4 -.t2i5n1i -.t1in -.t3i2t4is -.t1or1 -.tran4c -.tr2 -.tra2n -.tri5bal -.t2r2ib -.tri3d -.trin4a -.t4ri5sti -.trist2 -.tro4ph -.tr2op -.tro1ph5o -.tro4v -.tula2r5 -.tu1l2a -.tur1b4 -.tu4r -.tur1i4 -.tu5te -.1tut -.tu3t2o -.4ul4l2 -.ulti5mat -.ultim4a -.ul1tim -.un5ce -.un1 -.un5ch -.u4n3d2 -.under5 -.unde2 -.un3e -.u4n3g -.u1ni3c -.un2i -.un2i3o2 -.u4n3k4 -.u4n5s4 -.un3t4 -.un5u4 -.2up1 -.up3l2 -.ur1a4 -.u4r -.ur5e2th1 -.ur1e2t -.ur2e -.ur4o -.va5l2ed -.1v2ale -.ve2 -.vec5 -.ve5lo -.2vel -.vent5il -.vent2i -.v5er4ie2 -.ver1 -.ver3n2 -.vic5to -.vic2 -.5vi2ct2 -.vi2s -.vis3i -.vi5so -.v2o1c -.1vo -.vo5lut -.vo2l -.wine5s -.wi2 -.win2e -.xy3l -.za5r -.z1a1 -a4a -1ab -2ab. -2aba -ab5ar2e -aba2r -ab4ay4 -2a2b1b2 -ab5ber1 -abb2e -2ab2e4 -ab3erd -aber1 -a5b3e4r1r4 -a3bet -ab1ic -a3b4ie2 -2abin -4a1b2i2o2 -abi5on -ab3it1a -ab4itu -ab3l2a -a2bl2 -abli4 -4abolic -abo2 -abol3i -ab3om -ab3ota -ab2ot -3about -abo4u2 -ab1r4 -2abs. -a4b1s2 -ab1ul -abu4lo -ab3use -ab3usi -2a2by -ac2a -ac5a2bl2 -ac1ab -ac3al -5ac1anth2 -aca2n -ac5ard -a1ca2r -a5c1at -ach5al -a2ch -acha2 -a5ch2in1i -ach1in -ach5i2s1m4 -achro4 -a3c2hr -ach5u4r4 -2a1c2i2 -a4cic -aci4e2r1s2 -aci3er1 -acie2 -ac2i3f4 -4acit -ack5a -ack1 -ac3li -acl2 -4a4co. -a1c2o2 -aco3d -ac5on1r -acon1 -4acos -4aco4u2 -ac1r2 -ac3r2y2 -act5ate -a2ct -ac2ta -act5ile -ac2to -act5o2r2y -act1or -ac2t5r2 -ac5uat -ac2ua2 -a5d2ai2 -a3d2a3v -4adee -ad5en1i -ad4ha2 -a4d1h -ad3ica -a5d2i1f -4adil4 -adi4op -a3d2i1o2 -ad2i4p -adis4i -adi4s1 -a3diti -3adju -ad1j -5admi2t -a2d1m -a2do -4adoe -4ad2oi -ad3ol -a3d4os -ad1ow -ad1r2 -a3dr2a2m4 -4a2du -ad3u1l2a -ad3um -4a2d2y -ae5a2 -ae4cit -ae1c2i2 -ae1c2o3 -4a2ed -aed5i4s1 -ae5g -ae3on -ae5p -aero2d2y5 -aer1 -aer2od -ae4s -ae5si -aes3t2 -aet4a -ae2th4 -aet4or. -aet1or -aev3a -4af. -4afe -af5ta -a2ft -a4fu -ag4ar1i -aga2r -4ageri -ager1 -a5ghe2 -a2gh -a5g2i1a2 -a1g2i -agi4as -4ag2ino -4a2gl2 -agli4 -4a2g1n2 -ag3on1i -agor4a -ag5ot -a2gr2 -ag3ri -a3gru5 -2ah -a1h2a2 -aha2r2 -aha5r1a -a1he2 -a2h4n2 -a5h2oo -2ai2 -4ai. -a2i3a2 -a1ic -aid4a -ai2d -aid5er1 -a2ig2 -ai5gu -ai2l3er1 -ail3o -aim5er1 -aim2e -ain5de2r3s2 -a2i4nd -ainde2 -ainder1 -a4i5nea2 -a3ing. -ai4ng -a2in3i -a2in5o -aint5er1 -ain1te -air5a -a2ir -air5p -ai2r3s2 -ais1i -a5i2s1m4 -2a1j -a4ju -2ak -akel4 -ak5u -al5a2bl2 -al2a -al1ab -ala2ct4 -al4ac -a1l4ae -al5ais -al2ai2 -ala3ma -al2a2m -al5ance -ala2n -al3at -a5l2av -al2c3at -al1c2 -al3ch -ald5ri -aldr2 -2ale -a3lec -aleg4 -ale5ma -al5end4e2 -al2e4nd -a1leo -a2let -al3ib1r4 -ali4c2i2 -al5i4c5s4 -al1i2d -al3i1f -5a1l2ig -al1in -a5l2in1i -al2in5o -al5ipe -al2ip -al5ip2ot -ali3po -4alis. -al1is -4al2i1u2 -4alk -alk5ie2 -al4l1ab -all2 -al1l2a -al4lag -alli5a2n -al1li -all2i1a2 -al1l2ig4 -al4lis2h -all1is -a5loe -al3o1gr2 -a1l2o1g -a3l2om -a3l2oo -al1or -al4orim -alor1i -alos4 -a4lo4u2 -al3ous -a5low -al5pen -al3ph2 -al5tati -al3tie2 -alu3b -al5u4ed -a4lue -al3ues -a5lumn2i1a2 -alu4m1n2 -alumn1i -al1va -al5ver1 -alv5u -2a2ly4 -a5lyn1 -2a2m -a5mad -ama4g -ama4n5d -a1ma2n -a5marin4e -a1ma2r -amar1i -a3mas. 
-am1at -a5m4at1i2c -am5a2tu -am4bin -a2m1b -amb2i -3ambu -am5elo -am2e -a3men -am2e4n4d -am3era -amer1 -am5erl -am1i -am1i2c -am5ica -am2i1c5r2 -3ami2d -a3mili -am2il -am5i2ly -amin2i4f -am2in1i -am5in1iz -am4inos4 -am2ino -a5m2is. -a4mium. -a3m2i1u2 -ami3um -a3m4on -a1mo -amor5a -am2ort3 -am5ose -am2p -am5p4er3i -amper1 -amph2i5g -amp3li -amp2l2 -ampo5l -am3po -am3ul -amyl5 -a2my -a2n -an2a -a5nadi4 -an3ae -an3age -an2a5k2 -an3ali -an3ar1c2 -ana2r -a5nast2 -an2as -an4con1 -an1c2o2 -an3d4at -a4nd -an2da -and5au2 -and5eer1 -ande2 -an5de4l -an5d2i1f -and5ist2 -andi4s1 -an5dit -an4don1i -an1do -an4ea2 -an5eer1 -an3ell2 -anel5li -an3eu -a2n3ga2n -a4ng -ang2a -angov4 -an1go -an4gu4r -an1gu -4an1h2 -an3ic -an1i -an2i3f -an5i3fo -4an2i2g -an5ion -an2io2 -anis5te2 -anist2 -4anity -4an2i1u2 -an5no -an1n4 -4an2ny2 -an1o -an2oe -an3o1ma -an2om -anor3 -an2os -an5ot -a4n2s -an3s2c2 -an4s2c2o2 -ans3il -an2si -an4su4r -an3su -an2t2a -ant5a2bl2 -an2t1ab -an3t1al -an5t2a2m -an2te -1anth2 -an4thi -3an1t2h2r -4ant1ic -ant2i -an4tie2 -an4t3i4ng -ant1in -an2t4iv -an4to2n4e -an1t2o -an1t4r2 -an4tus -an1tu -an5tym -an2ty -an3ul -an2u -an3um. -an5u4m1s -a3nu4r -a5nut -a2n2y2 -an5y1a2 -a5ny1i -2ao -aol3i -5aow -2ap -4ap. -4apa -a1pac -ap3al -ap5aro -a1pa2r -ape5li -a5peu -aph5em -aphe4 -aph3i -aph5ol -a1pho -a3phy2l3 -aph2y -ap1i -ap5icu -ap3in -ap4in4e -a5p2ir -a3pla2n -ap2l2 -a1pl2a -ap5li -apo5str2 -a1po -a3p4os -apost2 -ap4o3th -ap2ot -a2pr2 -ap5ron -4a2ps2 -apt5at -a2pt2 -apu5la2r -apu1l2a -a5p4un1 -a4q -a5q2ui2 -aqu4 -a2r -4arabi -ar1ab -ar2a5bo2 -aract4i -ara2ct -ara2g -ar3age -ar4a2g2e4d -ar5a1g2i -ar3ago -a3r2a1j -ar3all2 -ar2a3m -ara4n4g -ara2n -aran5te -aran2t -ar5apa -ar2ap -ar1at -a3r2au2 -ar2a3v -ar3ba -ar1b -arb5et -ar2b2e -ar4b2i2d -arb2i -ar4bl2 -arb3li -ar4bul -ar5chet1 -ar1c2 -ar2ch -arche2 -ar1ch5o -ar5din1a -ar2d2in -ar4do2ne -ar1do -ar3en -ar2e -ar2e4n5d -ar5e2t2t4 -ar3e1v5 -ar5gh -ar1g2 -ar3gu -ar3h2 -ar1i -ar5i2ff -ar2i1f -ar4ill2 -a5r4i5net -arin4e -ar5in1i -a5rishi -aris2h -arm3er1 -arm2e -ar5mi2t -ar3nal -ar2n2 -arn1a -ar3nis -arn1i -ar3od -ar5o2i4d3 -a4r2oi -aro4mas -ar2om -aro1ma -aro4n -a5roti -ar2ot -a5ro4uc -aro4u2 -a4r3o4x -arp5e2r1s2 -arpe2 -arper1 -ar4pu -2a4r1r4 -ar2rh2 -a2r2s2 -ar2s5a2l -ar3so -art5at -art2 -ar2th -arth4e2 -ar1t2h3r -ar5t1iz -2aru -ar3um -ar5un4 -a3ry1o2 -a2r2y -a5ry2t -ar5z -as1a -as4af -asa2n2 -2asc2 -as5con1 -as2c2o2 -as5c2ot -as2cr2 -as2e -as3e2ct -4a2s2ed -ase2p4 -ash5ay -as2h -asha2 -ash5i4l -as5i2ly -a2s3in -a5s2io4 -a3s1it -a4s5iv -ask5er1 -as2k2 -aske2 -aski4 -as4l2a -a2sl4 -as4lo -2aso -as5o2ch -as2oc -a4s4o2n2ed -as4o2ne -as5or -as3ph2 -a4ss2 -assa5g2i -as1sa -assa4g -ass5i2bl2 -as4s1ib -as4sil -as3s2it5 -2asta -ast2 -as4tat -as4t2i3a2 -as3t1is -as4tit -4asto2 -as3tra -astr2 -as4tri -as1u -as4un1 -as5u4r -2a2ta -4atabi -a2t1ab -a5tal1is -at2a2m4 -at2a3p -atar3a -ata2r -ata3s -ata3t4 -at3e2au3 -atea2 -at3e2ch -at5eer1 -a5tel. -ate2l -at2e5le -at5enat -aten1a -at3ent -4ater1 -at3era -at5er1n3is -ater2n2 -atern1i -at5ern3iz -4ate4ss -a2t2es -at5et -4a2th -ath3a4 -a3then -athe2 -ath5er3in -at4her1 -ath5ero -ath5ete -athet1 -ath3i -ath3od -a5th2o4n -a1t2h5r -4a3t2i2a2 -at1i2c -at5icis -at2i1c2i2 -ati5cit -at5ic1iz -a2t2i1f2 -a4t1i4l -a4tim -a2t3in -4atin1a -at5i4ng -4at4is. 
-a2t1is -at1it -atit3u -atitud5i -4a3t2i1u2 -at4ivi -a2t1iv -a5tiv1iz -a2to -5at5od -4a1t2o1g -2atol -4aton -a3t2oo -a4to2ps2 -a5t4oria2n -at1or -ator1i -ator2i1a2 -a4to2r2y -atos4 -a5t2oz -2a2tr2 -at3ra -a4tr2e -5at5re4s4s -at1ri -a3t2r2ic5u -at3ron -at5ro4u2 -at4tag -a2t1t4 -atta4 -2a2tu -at1ul -atu4m -at3ur1a -atu4r -at3ur1g2 -4a2ty -2au2 -4au. -au1b5i -4a4uc -au5cer1 -au1c3o2 -au4d5er1 -aud4e -audic4 -aul3i -aul4t -aul5t4ed -aul1te -ault5er1 -ault5i -au3ma -aun2 -aun5ch4ie2 -aun2ch -au4n3d -aun4dr2e -aundr2 -au5reo -au4r -aur2e -aur4o -au5ror -4aus. -aus5er1 -aus5p -au4s4t4ed -a4ust2 -auste2 -aut3a2r -aut3er1 -au3th -2av -a2v4ab -ava4g -av3a4ge -ava5l2a -av5alr4 -av5ant -ava2n -av5a2r -avas3 -av3e4nd -av3er2n2 -aver1 -av3ig -av4i1ol4 -a3v2i1o2 -av1is -aw5er. -awer1 -aw5e2r1s2 -aw1i2 -aw5n2ie2 -awn1i -aw5y2 -a4x -ax2i2d -ax1i -4ay -ay5l2a -ay3m -ayn4 -a4y2s2 -ay5si -ay5sta -ays1t2 -ayth4 -2az2 -az3a2r -az1a1 -aze4 -az5ee -azy1g4 -azz4l -azz2 -2ba. -ba5b2ir4 -b1ab -3back1 -baen4 -bag4a -5b2ah -b2a4i2 -bal3a -balm5i -balm2 -ba5lon -bal5u -bam4a -b2a2m -ban4a -ba2n -ba5na2n -b4ane -5ba4ng -b4aniti -ban1i -b4a4n2s -b2a4p1 -5bar1b -ba2r -bar4d -bardi4 -bar4n2 -ba5r2om -bar3o4n -5ba2r2s2 -1bas -bas4te2 -bast2 -b4a4th4 -3bat1i2c -ba5t2io2 -b4at5on -ba2to -battle5 -ba2t1t4 -bat2tl -2b1b2 -b4b2a2ta -b3bli -b2bl2 -b4bo2n4e -bbo2 -b3bon -b1c2 -bcord4 -b1c2o2 -2b1d -bdeac5 -bdea2 -bde4b -b1di4v2 -b2e -4be. -3bea2 -4beas -be3c2a -3becu -2b2ed -be3da -bed5el -bede2 -bed2i -be4do -be5dra -bedr2 -be4du -5bee -3be1f -be3go -be5gr2 -be3gu -1bel -be3l2a -2b2ele -be3l1it -bel4t -be3m -b2e4n4d -ben2d5a -bend5er1 -bende2 -b4e1ne -be5n2i2g -ben1i -b4e5n2u -4beo -be3q -2b2er2e -ber1 -berg2a5m -ber1g2 -berl4 -5be4r1r4 -be2r5s2 -b5er2t1in -bert2 -be1s2 -2b4es. -be3sl4 -be3tr2 -be3w -2b1f -b1f2a4 -4b1h -b4ha2 -2bi. -1b2i1a2 -bi4b1 -bi1cen5 -b4ice -3b2i2d -bid5i -b4ie2 -bi4e2r1s2 -bi1er1 -b2i1f4 -bi4fid. -bifi4d -bi5ga -b2ig -bigu3 -b1il -b2ile -5biles -3b2ill2 -4bim -bi1me2t5 -bim2e -5bin1a -5b2i4n4d -bind3e2 -bin5et -b2in5i4 -1b2i2o2 -b4i1o3l -bi2o5m -bi3o4u2 -b2ip4 -b2i5q -b2ir4 -bi3r2e4 -bi5rus -b2is -5bi2s1m4 -bis4o -bi5s2ul5 -3bit2u1a2 -4bity -bi5ve -b1j -4b5k4 -2bl2 -5bl4ac -bl2a -blag4 -b3la2n -5blast2 -bl4as -bl2a5tu -blem5at -3b2ler1 -5blesp -4b3l2ik2 -blim3a -bl2i3o2 -bl2i2q -b3l1is -4b2ly -2b1m -bment4 -bm2e -b1men -bmi4 -4b1n -bo2 -4bo. -3b2oa2 -bo5a2m -5bob -bod5i -bo5h2 -2bo2i4d -b2oi -4boke -bol4e -4bo2l2ed -bol3i -bol4t -3bon -bon4c -bo2n4e -bon4ie2 -bon1i -bon3i4f -bon4sp -bo4n1s2 -1b2oo -b3orat -bor1a -bor3d -bor5ee -b2or2e -bor5et -3bor1i -bor5ic -bor5i3o2 -bor4n2 -bot3a2n -b2ot -5boti -boun5t2i -bo4u2 -b2oun1 -3bou4r -bous4 -bow2 -bow3s4 -4boxy -bo4x -5b2oy -br4 -3bra2ch -4bral -br2a2m4 -b2ra2n -bra4n4d -4b4re. -br2e -b4reas -brea4 -4b2res -brev5et -bre1v -b2ri2d -5brie1f -brie2 -bri4ng5 -bri4os -br2i3o2 -b5rist2 -b4r2oa2 -bro4ma -br2om -bros4 -brum4 -4bry. -b2r2y -4b1s2 -b3sc2 -bscon4 -bs2c2o2 -bsen4 -bserv5a2n -bser1 -bser1v2 -b5si -b2sin4 -bso2 -bsol3e -bs2ol -bso3lu -b4stac -bst2 -bstu1pe5 -bs1tu -bst2up -2b1t -b5t1let -b2tl -4bu. -5bub -buf5fer1 -buf2 -bu2ff -buf1fe -b4ul2i -b4ulos -bu1lo -bun2 -bu4n4a4 -b5u5nat -bunt4 -bur3e -bu4r -bur4ri -bu4r1r4 -busi4e2 -bu4ss2 -bus5si -3b4ust2 -bu5ta2r -b3ute -b5ut1in -but2i -3bu3t2io2 -bu2t4iv -b5ut5o -b1v -4b3w -2by -4by. -3by1i -b4y2s4 -5byt -2ca. 
-c2ab5in -c1ab -c4ace -ca1c2o3 -cad4r2 -5caf -ca3go -5c2ai2 -5c2ak -c1al -c4al2a -ca5la1ma2n -cala3ma -cal2a2m -cal5a2r -3cal1c2 -ca5le1f -c2ale -cal2l5in -call2 -cal1li -cal4m2 -c2a3ly4 -ca3ma -c2a2m -cam4i -ca5na2r -ca2n -can2a -c2an4e -c4an1o -ca3n2oe -can5ta2r -can2t2a -can5t4ed -can2te -c4an4t1ic -cant2i -can4t4r2 -5c2ao -1c2ap -ca5p2il -cap1i -ca2pt4 -cap3ti -cap3u -1ca2r -ca3ra5c -car5am2e -car2a3m -ca3ree -car2e -ca3r4i3c -car1i -car3i1f -car5m -car3n1i -car2n2 -car3ol -car5o4n -car5oo4 -ca3ro4u2 -car4v2 -cas2e5 -cashi4 -cas2h -3ca4s3s2 -cas5t2ig -cast2 -3cas1u3 -c1at -c4at. -c2a2tc2 -c4at2om -ca2to -c2a3t2r2 -c4a4t1s -c2a2t4u -3c2au2 -caulk4i -cav3i4l -c2av -3c4ay -c1c4 -ccen1t5r2 -c1cen2 -c3cent -cces4sa -c5ce4ss -c3ch -cci3d4 -c1c2i2 -cc2ip4 -cc2le3 -ccl2 -4ce. -4c4e1ab -cea2 -cea2n3 -3ceas -ce4c2i2 -2c2ed -5ceda -ce3da2r -3cede2 -3cedi -4ce1f -ce5g -3ce2iv -cel3ai2 -cel2a -cel5ib5 -5cell2 -cel5lin -cel1li -celo4 -ce5l2om -4ce2ly -2cem -ce4me2t -ce1m2e4 -3cemi -ce4mo -1cen2 -5cenc -cen5c2i2 -cen5d2ed -c2e4nd -cende2 -cend5en -cend5er1 -cen3i -2cen1n4 -3cent -cent4a -cen5t4ed -cen1te -cen5ter. -center1 -cen5te2r1s2 -cen5t2es -1cep -cept3a -ce2pt2 -cep5t4ic -3cera -cer1 -cer4b2i -cer1b -3c2erd -ce3rem -c2er2e -5cer2n2 -5ce4ss -cest5o -cest2 -ces5t4r2 -ce2t -cew4 -2ch -4ch. -4ch1ab -cha2 -3chae -3ch2ai2 -cham5per1 -ch2a4m -cham2p -chan5g2i -cha4n2g -cha2n -ch4a3pa -ch2ap -chec4 -che2 -4ch2ed -3chee -3chem -che3o2l -ch1er1 -ch4eri -5cher3in -ch4erl -4ches -3chete -chet1 -ch5eu2 -che5va -che4v4 -3chew -ch5ex -5c2hi. -3ch2i1a2 -3ch2i2c2o2 -ch1ic -ch3i2ly -chi4l -ch4in. -ch1in -ch3in1n4 -3ch2io2 -5ch2i2p -ch2izz4 -ch1iz -ch5k -5chlor -c4hl -4c2h1m -1cho -ch2o3a2 -5ch2oc -4ch2oi -ch5o2i4d -3chor -4cho2r4ed -ch2or2e -chor5ol -4choso -3ch2ot -4choti -ch5ous -ch2o4u2 -chow5 -3c2hr -chu4r4 -3chut -5chy1d4 -ch2y -3chy2l -3chy2m -1c2i2 -4ci. -4ciac -c2i1a2 -ci2a4m -ci3ca -4ci4d1s2 -ci2d -4cie. -cie2 -ci3er1 -ci3es2t2 -c2i5et -c2i3f -cifi4 -4c2ig -ci3ga -c3iga2r5 -3cil -cil5lin -cill2 -cil1li -2cim -cim3a -ci3m2e -5ci1men -4cin3ab -cin1a -4c2i4nd -c4ine5a2 -cine5mat -ci5ne4ss -cine4s -4cint -c4i3ol -c2io2 -ci5om -ci4po -c2ip -cisi4 -c2it3r2 -ck1 -cka2r5 -cka5t -c4ke -ck5i1f -ck4sc2 -c4k1s -cl2 -cla5r2i1f -cl2a -cla2r -clar1i -3cl4as -c2le2 -2c4le. -c5lec -clemat4 -c2lev3 -cli1m -c3li4ng -cl2i2q -c1lo4q -c4l4o1tr2 -cl2ot -c4lue4 -cl2yp5 -c2ly -5cl4y2s -cn2 -c3n1i -1c2o2 -4co. -3c2oa2 -c4o5ba -3c2oc -co3c2i2 -co5cu -co3dic -co3d2i1f -4co2d2y -3coe -co5et -co3gr2 -c2o1g -4c3o2i4d -c2oi -co3inc -4col. -col3a -co3l2o1g -co1lo -5col1o4u2 -co5ly -co5mas -c2om -co1ma -co4m2e -co3mo4 -com1p4 -con1 -con4ati4 -con1a -con4ch -con1c -con3d5er1 -co4nd -conde2 -con4ey -co2ne -con4ie2 -con1i -co4n3s2 -c2on3t -conta5d -3c2oo -coo2p4 -co3or -cop4e -co3ph -c4o5p2l2 -co3po -c1o2p4t2 -2cor1a -cor5d2ed -cord5er1 -4co2r4ed -c2or2e -co3rel -3cor2n2 -4coro -co5rol -5c2ort2 -3cos. -c4ost3a -cost2 -cost5er1 -coste2 -co5ta -c2ot -3c4o3tr2 -5coty -co4us5t2 -co4u2 -cov1 -co3va -cow5a -c2oz4 -co5z1i -c1q -cr2 -5craf -craft5i -cra2ft -c4ra2n -5cran1i -cr4a5n2i1u2 -cras3t2 -cras2 -cra4te -c2r2e -4crea2n -crea4 -cre3at -cre4p3 -5creti -cre4t2o -cret5or -cri3l -cron4 -crost4 -4cro4u2 -5c4rus -c2r2y2 -crym3 -cry1o3 -4c5s4 -csim5 -2ct -c2ta -c3tac -ctac5u -c5ta5g -ct1a2n -ct5ant -c5tar2i1a2 -cta2r -ctar1i -c3ta2to -c1te -c4tea2 -c2t5ee -c4tent -cter4i1a2 -c1teri -cter1 -c2t5es -ct5et -ct2ic -c5tic2i1a2 -ct2i1c2i2 -c4ti4c5s4 -ctifi4e2 -c1t2i1f2 -c3tim -ct4in. 
-ct1in -ct4in1a -ct5i4ng -c3t2in1i -c5t2in5o -c5t2io2 -c3t2is -c3tit -c4titu -c4tity -ct5ive -c2t1iv -ct4iv1i2t -ct5o1lo -c1t2om -c3ton -c5toris -ct1or -ctor1i -c5tor1iz -c1tr2 -c2tr2e -ct2r2o5t -c1tu -c2tum -c1ty -cub3at -cuba4 -c4uf2 -cu5ity -c2ui2 -cu4l5ab -cu1l2a -c2ul2i -cul2l5er1 -c4ull2 -cul2l5in -cul1li -1c2ult -cu4mi -5cu4n3a4 -cun1 -cun4e -5cun2i -5cuol -cu5pa -c2up -cu3pi -c3up2l2 -1cu4r -cur4er1 -cur2e -cur5ial -cur1i -cur2i1a2 -4cur4o -1cus -cus5a -c3u2t1iv -cut2i -c3utr2 -5cuu4 -cu5v2 -2cy. -cy4b2i -c4y1b -1cyc -cyl3 -cy4m -cy5no -cyn1 -c4y2s4 -cys5to -cys1t2 -cy4t -cz2 -4da. -d4ab1r4 -d1ab -1d2ac -da2ch4 -d5ache2 -3da2ct -d1ag -d4a4g2i -d4ale -d4al1g2 -dal5ler1 -dall2 -dam5a -d2a2m -3dam2e -d3am1i -da5mu -3da4ng -da2n -d1an4t -d3ap -d3ard -da2r -5darm -3d4as2 -d2ast5a -dast2 -d1at -da2t1iv4 -d2a2t4u -dau2gh3 -d2au2 -daun5te -daun2 -3d2av -d3b -d3c4 -d1d4 -d4d4er2e -dder1 -d3di -d3d2ler1 -d2dl4 -d3dli -d3dy1i -d2d2y -2de. -de1a2c3t -dea2 -de5aw -de4b2i -de1b -deb5it -3dec -de5cant -dec2a -deca2n -de4cil -de1c2i2 -de1cr2 -4de2ct -ded3i -d2ed -def2or5e -de1f -de1fo -de4fy. -de3g -de4gu -de3io2 -5d4e3is -de3lat -del2a -de1li4e2 -del5ler1 -dell2 -del5li -de5lo -1d4em -4de4mie2 -4dem4is -d4em4o4n -de1mo -de4mo4n1s2 -de3mor -de4mos -4de2my -de1n2a -d2e4n4d -4d4ene -d3en1h2 -den2i4e2 -den1i -dens5a -de4n2s -dens5er1 -den5tit -dent2i -de3od -deo3l -deon2 -de3ont5 -de1p -depen4 -deposi4 -de1po -de3p4os -de2p4u -d3eq -derac4 -der1 -de3r2ai2 -d4er2e -4d4e2r4ed -de5reg -3der3er1 -1deri -der3k4 -3derm -der4mi -der5min -5derne -der2n2 -3dero4 -der5os -de2r3s2 -5d2eru -4d4es. -de3sa -5de2sc2 -des4ca -de5sc1al -de3sec -des4i -de3s4i2d -des5ig1n1a -des2i4g -desi2gn2 -des1p -des5p4o2n -des2po -de3sq -d3e4st. -dest2 -de2s3ti -1de1t -de3t2es -de5th1 -de2ti -dev3i4l -de3vis -de3vi2t -de4v2oi -de1vo -devol5u -devo2l -3dex -2d5f -dfol4 -d1fo -d2g -dg4a -d1gel4 -d4gen -d3gr2 -4d1h -dh2ot4 -d4hu -4di. -1d2i1a2 -di2ad -3dia2r -di5at5om -di3at -dia2to -4d1ib -d1ic. -dic5a2m -d4i4ce -d4i3ch -d5icl2 -dic5ol -d2i2c2o2 -1di2ct -dic5tat -dic2ta -dic4te -5dicul -d2icu -d5i1cu4r -1di2d -di4e2r1s2 -die2 -di1er1 -3di3ev -d4i3fo -d2i1f -dig3al -d2ig -di3g2a2m -dil4 -5dill2 -dilo4 -d4i3lu -di5mer1 -dim2e -di1me2t4 -di1m1i -2d1in -din4e -d4in5g2i -di4ng -d4i5nos -d2ino -3d2i1o2 -di2o4c -di4ol2a -d4i1ol -di2p5t2 -d2ip -3dir2e -d2ir -di3r1i -4d5iro -di4s1 -d4i2s3c2 -d4is3en3 -3d2is2i1a2 -3d2i4s1s -d4it4as -dit1a -d4iter1 -dithe4 -d2ith -d3ito -dit1or3 -2dity -1d2i1u2 -1di1v2 -di4val -d2iva -di5vin2e -di2v1in -dix4i -d2ix -d1j -2dl4 -d1l2a -5dle1f -5dlest2 -3dlew -dlin4 -d1lo -d5lu -2d1m -4d1n2 -1do -4do. -d4ob -do4c3u -d2oc -do2g4a -d2o1g -do4j -d4ol. -dol3en -do5l4ine -dol5it -do4lon -do1lo -d4o4ls -5dom. -d2om -do1ma2n4 -do1ma -domin5 -dom1i -dom5ino -dom5i2t -do5mo -don4at -don1a -4do2ny2 -3d2oo -d2or -4dor. -d2or4m -d2ort4 -d4os -do5sim -dossi4 -do4ss -dot1a -d2ot -dot4t1in -do2t1t4 -dot3ti -2dous -do4u2 -d4own -3do4x -d1p -dr2 -d5rail -dr2ai2 -d3ral -3dr2a2m -dra2n4 -d4ras2 -drast4 -3drel -dr2e -dres4 -dres2s5o -dre4ss -dri4e2 -d4r2i1f -dr2i4g3 -d4r2om -dro1pho4 -dr2op -dru4n2k3 -drun1 -4d1s2 -d5sl4 -d2s3m4 -ds4mi -d4sw2 -dt4 -dt5ho -1du -2du. -du1at -d2ua2 -3d4uc -du4ch5 -duci5a2n -du1c2i2 -duc2i1a2 -du4c2o2 -du5eli -du5ell2 -du5en -du5e2t2t4 -due4t -du5in -d2ui2 -dul3c2 -d3ule -d4ul4l2 -dum4b2e -du2m1b -du4n4a4 -dun1 -d5un4c -d2u2p -du3p2l2 -5dur1o -du4r -d5use -dust5er1 -d4ust2 -duste2 -du3u4 -d1v -dver2 -dvert3 -dvoc5at -d1vo -dv2oc -dvoc2a -2d1w -dwell3 -2d2y -dy4ad. 
-dy1a2 -d1y5a2r -5dy4e -5dyk -dyl2 -dyll3 -5dymi -3dyn1 -dys3p -d4y2s -d3zo -ea2 -4e1ab -e1a2ct -eac4te -ea5cu -e5ad1d4 -ead3er1 -ead1i -ead3li -ea2dl4 -ea4g -e2ak1 -eal3a -ea2l3er1 -e2ale -ea3l2o1g -eam4bl2 -e2a2m -ea2m1b -eam3er1 -eam2e -ean5i -ea2n -e2ap2 -eap5er1 -e3a4p1p2 -ear3a -ea2r -ear3er1 -ear2e -ear4li -e5a4r2r4 -ear4te -eart2 -earth5i -ear2th -eas5er1 -eas2e -ea4son1i -e2aso -e1a4s1s2 -eassem4 -eas4t2 -east5i -eat5en1i -e4at3er1 -eat5ie2 -e3a2t2i1f2 -eat1it4 -eat4it3u -e3at1ri -e2a2tr2 -e4a2tu -e2au3 -eav5i -e2av -eavi4e2 -eav5o4u2 -ea1vo -eaz5i -e2az2 -e1b -eba2r4 -e2b2b2 -eb2e4 -e4bel. -e1bel -e4be4ls -e2ben -eb5et -eb2i -e5b1il -e4bin -e4b2is -e4bl2 -e4bos -ebo2 -ebo1t3o -eb2ot -e2br4 -eb1ra -e2b2t -e4b4uc -ebus5i -ec2a -ec3ade -ecad5en -ec2al5e -ec1al -e5c2a2m -e4ca1po -e1c2ap -ec3at -ec5a2th -e1ce -ecen2t5o -e1cen2 -e3cent -ech3i -e2ch -e4cib -e1c2i2 -ec2i4f -ecip5i -ec2ip -e1cl2 -ec3l2ip -econ4s2c2 -e1c2o2 -econ1 -eco4n3s2 -econstit5 -econ3s2t2 -e2c3or1a -e4c5oro -ec3rat -ecr2 -e4c5rea2n -ec2r2e -ecrea4 -e4crem -ec1ro -ect5ati -e2ct -ec2ta -ec4ter1 -ec1te -ect2i4c -ec4tit -ec4t5us -ec1tu -ec1ul -e5c2ul2i -2ed -e5da4n2s -eda2n -e2d1at -ede2 -2e4d2ed -e5de1h2 -e4d2ele -edes3t2 -ede3te -e1de1t -edeter5 -e3dev -e5dew -ed4g -edi4a4ls -e1d2i1a2 -ed5i4c1al -ed5i4c5s4 -ediges4 -ed2ig -edi1ge -ed5i1gr2 -ed3im2e -ed1it -e1di2v2 -ediv5i2d -ed3li -e2dl4 -ed2or4 -e1do -e4do4x -ed1ro -edr2 -edu5cer1 -e1du -e3d4uc -e2dul -ed3u1lo -e4d5u4r -ee4ce -eed3er1 -e2ed -eede2 -ee4do -ee2f -ee5g -ee1i -ee2l1i -ee2m -eem5er1 -ee1m2e4 -eem3i -eep1 -ee4pa -eer4in4e -eer1 -eer3in -eesi4 -ee3to -e1f -efac2t5o -e1f2a -efa2ct2 -efal4 -ef5er3ee -efer1 -ef2er2e -ef5ini4te -e2fin -ef2in1i -e4fite -ef4l2 -efor5est2 -e1fo -ef2or2e -2e3fu -e4fug -efut5a -e1gel3 -eg2i5a2 -e1g2i -e4gib -e3gl2a -e2gl2 -eg3le -eg4mi -e2g1m4 -eg5n1ab -e2gn2 -eg1n1a -e5g4on -e2gr2 -e5gu4r -e1h2 -e5ho -e2h5s -eh2y2 -ehyd5r2 -ehy1d4 -ei2d4 -5ei1do -4e2i1f -e2ig2 -e5ignit -ei2gn2 -eig1n1i -e4in. -e3inc -e2ine -e1i4ng -e2in5i -e4ins. -ei4n1s2 -e2i4p4 -eir3o -e2ir -4eis -eis3i -eit5er1 -e2ith4 -e2iv -eiv3er1 -e2iz -e1j -ejudic4 -eju1di -ek3en -ek5is4 -ek4l2 -e4l4ac -el2a -e5lad -el5age -el2a2m4 -el5anc -ela2n -elast3 -el4as -e4la2t2es -ela4te -el5at3ive -ela2t1iv -elch5er1 -el1c2 -elch4e2 -el2ch -eld3er1 -2ele -elea5g -elea2 -4e4l2ed -el5en1i -el3en3o -ele3o -ele5ph1 -e2l1er1 -e1les -e5le4s4s -e4leste2 -elest2 -el3et3o -e1let -el3ev3a -e2lev -ele3vi -el5ex -e4l3ica4 -e1lie2 -eli4e2r1s2 -eli1er1 -e3lim -el3i4ng -eli3on -el2io2 -e4l1i4s -el2i2t4t4 -el1it -e3l4iv -el4l1ab -ell2 -el1l2a -ell5iz -el1li -e3l2oa2 -e3l2oc -elo5c2a -eloc3u -elo4di -e2l2o1g -elom5ate -el2om -elo1ma -elo3mat -el5op. -el5o2ps2 -elp5in -el3so -e4ls -el5tie2 -e1lu -elu4m -elus4 -elv4 -e5ly1i -e2ly -3elyt -e2m3ago4 -em3an3a -e1ma2n -e1ma2r4 -emarc5a -emar1c2 -em5at1iz -em2at5ol -ema2to -em5b2i -e2m1b -e1m2e4 -e4mee -e4mel -e3me2m -e4m3era -emer1 -em5ero -emet4e -e1me2t -em4icis -em1ic -emi1c2i2 -e4mie2 -e2m2ig -emig5ra -emi2gr2 -em3in1a -em5i4ng -e3m2i3o2 -em3i2s1m4 -e4mit1a -emi2t -e4m2i1u2 -em4mae -e2m1m2 -4emnit -e4m1n2 -emn1i -emo3b2i -e1mo -emo2d4u -emod1 -e2m2o1g -e4m2oi -em3o1lo -em5o1m -4em4on -e3mon1i -emon5ol -emo4no -e2mor -em5oris -emor1i -em3o4r1r4 -e4mot1ic -em2ot -e5m2oz -em1pa5r -em1p -em3pa -empara5 -em5pes -4emp4li. -emp2l2 -em4pr2e -em1pr2 -em3um -e5mut -en3a2c -en1a -e4nal -en3a1m3o -e1n2a2m -en4an1n4 -ena2n -e2n3a2r -en3as. 
-en2as -ena5tur2e -en2a2tu -enatu4r -3en1cep -en4cile -en1c2i2 -en3cil -en2c1t4 -2e4nd -en4d5al -en2da -en4de2dl4 -ende2 -en1d2ed -end5rit -endr2 -4ene -e2n2e5d -en3ee -e5nelle -enell2 -e5ne2p -e2n1er1 -e5nereo -en2er2e -ener5v2 -en5esi -e3ne4ss -en1et -en4e2t2t4 -e2n3eu -e3n4ew -en3g2i -e4ng -en3ic -en1i -en5i1er1 -en2ie2 -en3i1g3r2 -en2i2g -en5in -enit5u -e4n3k -en1o -en3oi -en2o2m -en3oty -en2ot -enov3 -e4n2s -ens5a2l -en3sp -en4s4u2m -en3su -en4sus -ent3a2r -en4te2r1s2 -en1te -enter1 -en5t2i2a2 -ent2i -en4ti3fy -en1t2i1f2 -en2t2o -en4tri -en1tr2 -ent5rin -ent5up -en1tu -en4tus -4en2u -en3u1a2 -en3uf2 -en3u4r -en5ut -5enwa -en5w -eo3b -e4o2ch -e2oc -e4oda -eof2 -eo2l -eol5ar. -eol2a -eola2r -eol5at -eolo3g2i4 -eo1lo -eo1l2o1g -e5olu -e2o3m -eon4a -e3ont -e1o2p4t2 -e1or1 -eor4de -e2or3e -eor5o -eo1s2 -eo4t2o -e2ot -e1pa -ep4al -ep5ar1c2 -e1pa2r -epa4t -epend5en -ep2e4nd -epende2 -ep5ert2 -eper1 -e4pete -e3pet -epe5ti1t2io2 -epe2ti -e4p5ex -eph1 -eph4i -e2p2ig -e5pl2a -ep2l2 -ep3lic -epol3a -e1po -epol3i -epol2it5 -ep3re1h2 -e1pr2 -epr2e -ep3res5e -e4p5ri4m -e4p5rob5 -ept3or -e2pt2 -e1p4u -e3pu4r5 -e4puta -equin4 -equ4 -eq2ui2 -equ2i5no -er1 -era4cie2 -er2a1c2i2 -era4do -er2ad -era4g -era4l -er3aph -er2ap -er3ap1i -er3a2p4y -4era4ti. -4era4tim -er5a2tu -er3bat -er1b -er2ba -er3b2e -er2b5os -erbo2 -2er1c2 -er3ch -er3cl2 -2erd -er2d5a2r -er4di4e2 -2er2e -er3eal -erea4 -4e2r4ed -er3e2gr2 -er5el. -er5ell2 -er5e4ls -e4re1m2e4 -er3en -5er2e4nd -er4en4e2 -ere5o2l -e3re1q -er3er1 -ere4s -er5ese -er3esi -er5este2 -erest2 -er5e2sti -eres5t4r2 -eret4 -er3et. -er3e4t1s -er3e2t2t4 -ere4v -er3ex -ergi3v -er1g2 -er1g2i -er3gl2 -er3ia. -er2i1a2 -er4ia2n -eri4ci2d -eri1c2i2 -5er5ick1 -er2i2d -er3ie2 -er3i2ff -er2i1f -er4i1me2t -erim2e -er3in -eri4n1a -eri4on -er2i3o2 -er3io4u2 -er4i2s4c2 -er4i5sta -erist2 -4eri2t -e3r2i4v -er5iz -4er1j -er2k4 -er3m2e -er4m2oi -er1mo -5ernacl2 -er2n2 -er3na2c -ern1a -er5nal1is -er1n3er1 -er1n3is -ern1i -ern3it -4e4ro. -er3o2i4d3 -e4r2oi -er4o5is -ero5st2 -erpent5in -erpe2 -er3pent -erpent2i -erre5l2a -e4r1r4 -erre2l -err2e -er4rep -er5s2ine -e2r1s2 -er2sin -er5t4ed -ert2 -er4ter1 -ert5er. -ert5e2r1s2 -er4th2i -er2t5iz -2eru -eru4b -er2u5d -eru4n2d5 -erun1 -er4vi4l -er1v2 -5erw2au2 -er1w -eryth3 -e2r2y -ery2t -2er2z -4es. -es5a4m -es5a2n -e2sc2 -es5ca2n -es1ca -es5che2 -es2ch2 -esci5e2 -es1c2i2 -escut5 -e2s1cu -e3se2a2 -e3se2ct -e5see -e5seg5 -ese4l -es5enc -e3sh4a2 -es2h -e1shi -e5shu4 -esi4a2n -es2i1a2 -es5ic. -e5s2ick1 -es5id3en -es4i2d -esi4de -esi5d2i1u2 -es5ies -esie2 -es3im -e2s3in -e5sion -e1s2io4 -e4s1it -es4it. -es4i4t1s -e3sk1in -es2k2 -e3s4mi -e2s1m4 -e2s4od -es3ol3a -es2ol -es3ol3u -es3on1a -es2o3p -e1sor -es3per3 -es5pir1a -esp2ir -es5pit -es4p2l2 -es3plen5 -esple2 -es5p2ot -es2po -e5s2pr2 -es4s3a2n -e4ss -es1sa -essa2r5 -ess5ee -es4sil -es2so -es2t1a4b2 -est2 -est3a2n -e5sta2r -es5t2au2 -e2sti -est5ifi -es1t2i1f2 -est5igati -est2ig -estig1at -e3st2oc -es5t2oo -est4r2 -es4tud4 -es1tu -e1su -e2s3ul -es4u4r5 -et2a -et3al. 
-et5all1is -etall2 -etal1li -et3al5o4 -eta5m2e -et2a2m -et2a3p -et3ar1i -eta2r -et5a2r2y -et4as -et3ate -et3ati -et5ay -et3eer1 -etel1l5i -ete2l -e1tell2 -etend5er1 -et2e4nd -etende2 -et5en1i -eter2 -et3er3a -et5er3i1a2 -e1teri -e3tex4 -e2th1 -ethy2l3 -eth2y -2e1t2i2a2 -e3t2ic1u -et1ic -e3t4i4g2i -et2ig -e5tim -et3in -eti4n1a -e3t2ir -et5i2t3iv -e3t2i4u2 -et5o1lo -e5tomet1e -et2om -etom2e -eto1me2t -e2ton -et3on1a -etor3i -et1or -etra5g -etr2 -4e4tral -etr2a5m -et4ra2n -et5re4s4s -etr2e -et1ri -et4r2i1a2 -etrib5a -et2r2ib -e4trim -et1ro -e2t2t4 -et3ter1 -etud4 -et3ud4e -e4tum -et4we -et1w -e2t5z2 -eu3d2i3o5 -eue4 -euk5 -4eum -e3ur1g2 -eu4r -eur5i -eus4 -eu5ten -eu3ter1 -eut3i -ev4abi -e2v3ab -e1v2al5e -ev2a2p3 -ev3ast2 -ev3at -ev5eli -e2vel -eve4n -ev5erat -ever1 -ev5er3en -e4v4er2e4 -ever4er1 -e4veri -e4ves -e1v2i1a2 -e4vi1ab -e2vic2 -evic1tu4 -e5vi2ct2 -evi2d3 -ev5ig -ev4ile -evi4l -ev5is2h -evi2s5in -evis5o -e4v2i1u2 -ev2oc3 -e1vo -evol5e -evo2l -evol5ute -evu4 -e1wa -e4wag -e5w4ay -ew1er1 -e3wh2 -ew5ie2 -ewi2 -ew1in -ew5is2h -e3wit -e1wr -ex5i4c -ex1i -ex4on. -ex1o -exo4n -1ex3p -4ey. -ey4as -ey1a2 -eyl4 -e4y3s2 -ez5er. -ezer1 -ez5e2r1s2 -ez5ie2 -ez1i -1f2a -2fa. -fab4i -f1ab -fa3ce2t -fa2ct2 -fa2c3u -2f3ag -fal2l5in -fall2 -fal1li -5falo -fa5lon -fals5ifie2 -fa4ls -fals2i1f4 -4fan3a -fa2n -fan5tas1iz -fan2t2a -fantas3i -fant3i -5fa2r -far3i -5faw -4f5b -2f5d -2fe. -3feas -fea2 -fe4a3tu -fe2b5r4 -fe1b -3fec -2f2ed1 -5fei -fe1li -fem3i -femin5 -fend5er1 -f2e4nd -fende2 -f5en1i -4f4e2r4ed -fer1 -f2er2e -fer3ee -3fero -fe5r2oc -fer5om -3fe4r1r4 -fer3v2 -2f4es. -fes2s3o -fe4ss -fest3a -fest2 -fe2st5i -fe4t -fet4al -fet2a -fet4in -fet4o -3feu -fe5veri -fever1 -2ff -f1fe -ffec4te -f3fec -ffe2ct -f5fe4t -f1fi -f5f2i1a2 -f3fic -f5fie2 -f1fi2l3 -f2f3is -ff4le -ffl2 -ff3lin4 -f3f2oc3 -f1fo -ffon1i4 -ffo2n -ff2or3e -f3fr2 -ffran2ch5 -ffra2n -4f5h -fi5ance -f2i1a2 -fia2n -f4ib5u -4fic. -4fi4c1al -3fi1c2i2 -4fi4c5s4 -fi5del -fi2d -fid3en -fiel4 -fie2 -fi2er4c2 -fi1er1 -figh2t5 -f2ig -fi2gh -1fi2l -2fin -fin2a -fi3na4l -f2i4nd3 -fin2e -f1i4ng -5finin -f2in1i -fin4n1i -f2in1n4 -fir2m1 -f2ir -f3it1a -f5it3ee -fl2 -3fl2a -fle2s -f3lica -flin4 -3flo -flo5ric -flor1i -3flu -flu1m4i -1fo -4fo. -3f2oc -fo2e -foet3i4 -fo1et -fo1l4i -fo4li2e2 -fomen4t4 -f2om -fom2e -fo1men -fo2n -fon4de2 -fo4nd -3f2oo -fo5r2a2m4 -for1a -for5ay -for5b -for4di -fore3t -f2or2e -5f2orm -for4m3a -fortu5n4a4 -f2ort2 -for1tu -for3tun1 -fo3v -1fr2 -frag5a -fran2t4 -fra2n -fra2r4 -frat2ch4 -fra2tc2 -fre4s -fr2e -fros4t5i -frost2 -fr4uc4 -2f3s -fs4p -2ft -f1t4ed -f4ter. -fter1 -f2t5es -ft2i4et -ftie2 -ft4ine -ft1in -3fu -4fu. -f4u4c -fuel5li -fuell2 -fug4a -fu4min -fu1mi -fu4n2g -fun1 -4fu2r4ed -fu4r -fur2e -fur3n2 -fu3sil5 -fus5o -fu5til -fut2i -4ga. -ga4cie2 -g2a1c2i2 -gadi4 -ga4d4os -ga2do -3gag -3g2ai2 -3g2ale -ga5len -gal2i4a2 -gal5ler1 -gall2 -3galo -gam4bl2 -g2a2m -ga2m1b -gan5at -ga2n -gan2a -4ga2n2ed -gang5er1 -ga4ng -g5ant. -gan4t4r2 -g5an4t1s -g5ar1c2 -ga2r -g4ar2e -gar3ee -gariz4a1 -gar1i -gar1iz -ga5r2ot -gar5p -5g2a4r1r4 -1ga4s -gas5i -g2as3o -gas2ol5 -gas2s5in -ga4ss2 -gast3r2 -gast2 -g1at -g4at. -ga2t5iv -g4a2to. -ga2to -g4atos4 -g4a2t1t4 -g2a2t5u -gaud5 -g2au2 -ga5z1a1 -g2az2 -g1b -g5d4 -2ge. -5geal -gea2 -3gea2n -2g2e4d -3gedi -5ge4d1n2 -4ge1f -1gel -4g2ele -ge4li -gel4in -gel5li -gell2 -ge4lu -2ge2ly -gem3i -5ge1mo -3gen -gen4du -g2e4nd -gen5it -gen1i -gen3o -gen5t2i -ge4o -geo3lo -geo2l -4g2er2e -ger1 -3germ4 -2g4es. 
-5ge4ss -gest5at -gest2 -3get -get3a -2g1f -2g1g -gg4a -g2ge -g5ge2dl4 -g2g2e4d -g3ger1 -g5ger3er1 -g4g2er2e -gg2i4a5 -g1g2i -g3gli -g2gl2 -g3glu3 -g5g2ly -ggr2av3 -g1gr2 -g4g4ro -2gh -g5h2ai2 -gha2 -gh5en1i -ghe2 -g3ho -g4hos -gh2t -1g2i -4gi. -gi4all2 -g2i1a2 -gi4at -3gib -g2i5c2o2 -g2i4g -gi5ga2n -1g4in5g2i -gi4ng -3g2io2 -gi4or -gi4ot -5g2ip -gi5pa -g4i4s -5gis. -gi2t1 -5gitu -giv5en. -2gl2 -g3la2r -gl2a -5glass. -gl4as -gla4ss2 -glec4 -3g2ler1 -g4leto -g1let -g4letr2 -g4ley -gli5on -gl2io2 -g5l1is4 -3glo -4g5lod -gl2om3 -4glop -3glu -glu5te -glu5t2i -3gl2yp2 -g2ly -2g1m4 -2gn2 -g1n1a -g4n1ab -g5nate -5gn4a2th -g5nati4 -gna5tu4r -gn2a2tu -gn5e2dl4 -g2n2ed -gn5ee -gn3er1 -g1n1i -g4n2i1a2 -g2n3in -gn4in. -g4n2i2o2 -g2no -5gnor1i -gno4s -2go. -5g2oa2 -3g2oc -5god -3goe -go4et -go4ge -g2o1g -4go3gr2a2m -go1gr2 -g5o2i4d -g2oi -g4o3is -go2m2e -g2om -5gon1n4 -go5n2om -3g2oo -goph4 -4gor. -5gor1g2 -4go2r1s2 -g4o2r2y -3gos -gos4t2 -2go4u2 -gour4i -gou4r -g1ous -gov1 -g3p -1gr2 -gr1ab4 -3gr2a2m -4gram2e -gr2a2p -g4r2e -gril4 -gri2m3a -g4ro -gr2o4g -g5ron -gr2op4 -3gru -gru3en -gr2u5i2 -gru2m4b -2g1s -gs4c2 -gs4t2 -g4sti -gth5en1i -gthe2 -g5to -g4u2a2 -gu5ab -5gua2n -3guar2d -gua2r -g5uat -2gue -5gueu -5guit4 -g2ui2 -gui5t1a -gu2ma -gu4mi -3gun1 -g4uras5 -gu4r -gur1a -g4u2r4ed -gur2e -gur4n2 -gur4u -4gu2r2y -gust5a -g4ust2 -2g1w -2gy -g4y2b -5gym -3gyn1 -gyn5o -g5z2 -ha2 -4ha. -h4ac -hadi4e2 -had4in4e -ha2d1in -hae3o -ha2g2e4d5 -ha3g2i3o2 -ha1g2i -hag5u -ha5ic -h2ai2 -hais4 -hak4ine -h2ak -hak1in -hal5ant -hal2a -hala2n -h2a4m -ha1m5a2n -han4cro -ha2n -han1cr2 -ha4n2g -h1an1i4 -h5an1iz -han4t -han2t3a -ha4pe -h2ap -hap3l2 -har1a -ha2r -har5b -har4d -har5die2 -har2ge4 -har1g2 -ha5ri2s1m4 -har1i -har3o -har4t4ed -hart2 -har4ti -has4te2 -hast2 -ha2t5o -haugh2t5 -h2au2 -hau2gh -ha2vel4 -h2av -hav5ersi -haver1 -have2r1s2 -ha1v5o -h1b -h1c -h1d -hdeac5 -hdea2 -h1du4 -he2 -4he. -h2ea2 -1head -3hea2r -hear2ch4 -hear1c2 -heas4t5 -heav5en -he2av -he2c3t4 -he5del -h2ed -hede2 -he3do -heek4 -h4ei -h4e3is -he5lat -hel2a -h5elin -he3l2io2 -he5l2i1u2 -hel4li -hell2 -h3el3o -hem1a -he3men -he1m2e4 -hemis4 -he5m4op -he1mo -hem4p -hende5 -h2e4nd -he3or1 -hep1 -h1er. -her1 -her4as2 -her2b -her2b3a -herb3i -here3a4 -h2er2e -here3o -h5er3e2t2t4 -heret4 -h5erh2 -her5ial -her2i1a2 -h5erin4e -her3in -h1erl -her5om -h4eron -h1e2r1s2 -h5erwa -her1w -hes3t4r2 -hest2 -het1 -h4et3a -het3i -het4t4ed -he2t2t4 -heu2 -h4eum3 -heumat5 -heu1ma -he4v4 -hev5i -hex5o -h1f -h5h -2hi. -hi4a2r -h2i1a2 -h1ic -hi3c4a2n -h4i4cin -hi1c2i2 -h4icl2 -h5ie. -hie2 -h1i1er1 -h4i4e2r1s2 -h1ies -h3ifi4 -h2i1f -h3i3fy -hig4o -h2ig -hi5ka -h2ik2 -hi4l -hi5ma4 -hi5mer1 -him2e -himos4 -hi2mo -h1in -h2i4n4d -h2in2e -hi5n2ie2 -h2in1i -h5in1iz -hi5nop -h2ino -h2i4n1s2 -hio5lo -h2io2 -h4i1ol -h4i1or -h2i2p -hip3l2 -h4ir -hi4r4r4 -hir3r5i -hit4a -h2iv5a -4hl -h3l2a -h1le -h3let -h1l2i -hl2i4a2 -2h1m -h4man3ic4 -h1ma2n -hman1i -h5mica -hm1ic -2h1n2 -hno1cen5 -hn2oc -hn4o3ce2 -4ho. -ho3a2n -h2oa2 -ho4c2o2 -h2oc -ho3don -ho2do -ho5du -ho5ep5 -hol3a2r -hol2a -hold1 -hol4is. -hol1is4 -ho5l4y2s -ho2ly -ho4mag -h2om -ho1ma -hom5in -hom1i -h2o4n -hon5em -ho2ne -ho5neu -hon3ey -hon2g3i -ho4ng -ho5n2io2 -hon1i -hon1o -1hood -h2oo -hoo5r -h4ope -ho2p5r2 -h4op4te -h1o2p1t2 -hor5et -h2or2e -h4or2n2 -horn5i -ho5r2o1g -hort5h -h2ort2 -hosi4 -ho4ton -h2ot -ho1t2o -h2o4u2 -3h2ouse3 -4h1p -2hr -hras5eo -hras2 -hr2as2e -hr2e4 -hre5ma -hr5er1 -hres4 -hri4 -hril2l5in -hrill2 -hril1li -hrim4 -h5rit -h3r2od -hrom4i -hr2om -h2r2y4 -h3rym3 -2h1s -hsi4 -h4s2k2 -ht5ag -ht5ee -ht3en. 
-ht5e2n1er1 -h4t4ene -ht3en1i -ht3e4n2s -ht5e2o -h2t5es -ht4f2oo -h2t3f -ht1fo -h1th -ht4ine -ht1in -hu4g -hu4mat -hu1ma -hu5mer1 -hum2e -hu4min -hu1mi -hun4c -hun1 -hu4n2k4 -hun4t -hur3i -hu4r -hu3s2i1a2 -huz4 -h1w -h4wart2 -hwa2r -h2y -hy2l -hyl5en -hyle2 -hy2m -hyn4 -hy3o2 -hyol5i -hy1pe -h2yp -hy3ph -hyr4 -hys3te2 -h4y2s -hys1t2 -hy4t -2i1a2 -ia4bl2 -i1ab -iab5ol1is4 -iabo2 -iabol3i -iab5ol1iz -i2a2ch -ia1c3o2 -i2ac2r2 -ia5cri -ia5d4em -i5ae -iaf4 -i2ag4 -i4a3g1n2 -i5a4g5o -ia3gr2 -i3ah -i5ai2 -iale2ct4 -i2ale -ia3lec -i3al1it -ial5li -iall2 -4ial1n4 -i2a3lo -i2a5ly4 -i5a2m1b -i2a2m -ia3m2e -ian2ch5 -ia2n -i3ant -i5ape -i2ap -ia3ph -i2ard -ia2r -4iarit -iar1i -i3at -ia5the2 -i4a2th -i5at2om -ia2to -i2a2t4u -iat3ur4a -iatu4r -i3au2 -i2av4 -ib3era -ib2e -iber1 -ib1i -i1b2i2o4 -ibios4 -ib5li -i2bl2 -4ibo2 -i4bon -ibor4 -i4bose -i5bo4u2 -ib1ri -ibr4 -4ibu -ib3uta -ic3ac -ic5a2do -i4c1al -ic1a2n -2i1ca2r -iccu4 -ic1c4 -4ice -i5ceo -4i2ch -ich4i -ich5i4ng -ich1in -ich5ol -i1cho -4icin -i1c2i2 -i5c2io2 -2ick1 -ic4lo -icl2 -2i2c2o2 -i3c2o3c -ic5ol3a -icon3o -icon1 -i5cop -icoty3le5 -i5coty -ic2ot -2i1cr2 -i4cri -i4cru -i4c2r2y2 -ic4te2dl4 -i2ct -ic1te -ict4ed -ic4ter1 -ict5ic -2icu -icu4lu -ic3um -i5cun4 -i5cut -2i1cy -i2d -id1a -i5d4ay -i1d4e4m -id3enc -id3era -ider1 -i3derm5 -i3d2icu -id3i1f -i5d2ig -i5dil4 -i3dim -id4ine4s -i2d1in -idin4e -idios4 -i3d2i1o2 -id2ir4 -id1i4s4 -id4ist2 -2i4d1it -i1di4v2 -id3li -i2dl4 -id3ol -i1do -idol3a -4idom1i -id2om -id3ow -4idr2 -id5ri -id3ul -i1du -ie2 -4iec -2ieg2 -ie3ga -ie5i -i5ell2 -4iem -2i1en -i2e4n2d -i1er1 -i3ere4s -i2er2e -i2eri -ier3i4n -4ier2n2 -ier2o -i4ert2 -i3e2sc2 -ies3e4l -i1es2t2 -i3e4st. -2i1et -i4et. -ie2t3ie2 -4ieu -i5eut3i -iev3a -iev3er1 -ie1v3o -2i1f -i2fe -if4f2a -i2ff -iff5ler1 -iff4le -iffl2 -i4f3ic. -i4fic3ac -i4f5i4c5s4 -ifi4d -i2fi4n -4i2fl2 -i3fo -i3f2oc5 -if5tee -i2ft -i3fy -2ig -i3gad -ig3a4nd -iga2n -3iga2r -i1ge -i3ger1 -ight5er. -i2gh -igh2t -ighter1 -ight5e2r1s2 -4i1g2i -ign5iz -i2gn2 -ig1n1i -ign2o5m -ig2no -i3gon -ig1or -ig3ot -i5gret -i1gr2 -ig4r2e -i4g5ro -i5gu5it4 -ig2ui2 -ig1u4r -2i1h -ih2y4 -2ii -i5in -i1ja4 -4iju -2ik2 -ik5a2n -ike4b -i2l3a -ila4g -ila5te2l -ila4te -i5l4ater1 -il4a4x -il5dr2 -il4du -i3len -ilesi4 -il3f -il3ia. -il2i1a2 -il3ia2r -ili4arl -i3li1c2i2 -i5l2i1en -ilie2 -ili4er1 -il4i4fe -il2i1f -il4ific -il1in -il5i2ne. -il4ine -4ili3o4u2 -il2io2 -il5i4p1p2 -il2ip -il5i1q -il4ite -il1it -ilit5u -il4mo -ilm2 -i5lon -il3o4u2 -ilth4 -il2tr2 -4ilu -il5ul -i5lum -il5ur2e -ilu4r -il3v -4ilym2 -i2ly -ima4c -im2ag -im3a4ge -im1al -i2m5a2m -i5m2as -i4mat4ed -i4ma2t3in -im2a2t5u -im1i -i3m2ie2 -im4ine -im5ino -im5me2s -i2m1m2 -imm2e -i2mo -i5m2o1g -i3m4on -im5oo -i3mos. -impar5a -im1p -im3pa -im1pa2r -impar2ad5 -im5p2ie2 -im2pi -imp2o2t5 -im3po -im5pr2 -im3pu4 -im1ul -im5um -in3ab -in1a -4inace -ina2c -in4a2do -in5a2gl2 -in3a2ir -in2ai2 -ina4l -4inal1it -i1n5a2m -in3a2n -in3ap -in4a2r2s2 -ina2r -i3nas. -in2as -4in2a2ta -inat1or5 -ina2to -in3au2 -in4aw -2inc -inc4t2u1a2 -in2c1t -inc1tu -2i4nd -in5da2r -in2da -inde5p -inde2 -inde4s5 -in1de3t -indeterm5 -indeter2 -in5dro -indr2 -4inea2 -4i2n2ed -in5ee -in5eg3a -4in5eo -ine4s -in3esi -ine5te -4ineu -inev5 -infilt5 -in3f -in1fi2l -infol4 -in1fo4 -4in3fu -4ing2a -i4ng -in5gal -4inge -ing5ha2 -in2gh4 -4in2g2i -4ingle -in2gl2 -4ingli -4in1go -4in1gu -in2g3um -2in1i -in5ia. -in2i1a2 -4inic -in4i1c2i2 -in3ion -in2io2 -in4itud -4i4n2k -ink4ine -ink1in -4i4n1l2 -2in1n4 -2ino -4i4no. -in3oi -i5nole -4inos -i3n4os. 
-in5ose -in3osi -4in1q -i4n1s2 -in4s2ch5 -ins2c2 -inse2 -inse2ct5 -in5sec -insec5u -in3si -5ins2k2 -insolv5 -ins2ol -in4tee -in1te -int5e4ss -in2t2es -in3til -int2i -int5res -in1tr2 -intr2e -intu5m -in1tu -2in2u -in5ul -in5um -in3un1 -in3u4r -invol5u -in3v2 -in1vo -invo2l -2io2 -io3a2ct4 -i2oa2 -i1od -iod3i4 -io2d5o -ioe4 -io3gr2 -i2o1g -4i1ol -io3ma -i2om -i4oman1i -io1ma2n -io3mo -i5ope -io3ph -i5o1po -io2p4s2 -i1or -ior2a4m4 -ior1a -4i2or2e -4iorit -ior1i -5ior1iz -4iorl -ior4n2 -io3sc2 -i3ose -i3osi -i4oso -i4o5sta -iost2 -i3ot -iot4a -i4o5th -iot5ic -i4o5tr2 -i4oty -i4our. -io4u2 -iou4r -i4ou2r2s2 -i5o4x -2ip -ip3al -ip2ap4 -ipar3o -i1pa2r -ipart5ite -ipart2 -ip1at -i3p2e4nd -i1ph2e4 -iphen3 -i5pheri -ipher1 -iphi4 -i4phu -ip3i2d -i5p2il -ip3in -ip4in4e -ip2ir4 -ip5is -ip1i4t -ip4iti -ip3lin -ip2l2 -ip3lo -i3po -i4p2o1g -i4poli -i4p2om -ip4o2n3 -i4pow -ip2p2l2 -i4p1p2 -ip3pli -ip4r2e -i1pr2 -ip5tor1i -i2pt2 -ipt1or -ip1ul -i5put -i2p4y4 -2iq -i3q2ua2 -iqu4 -2ir -ir1a -ir4abi -ir1ab -ira4c -ir4ae. -ir4ag -ir4al1in -ir4al1li -irall2 -i5r2a3so -iras2 -irassi4 -ira4ss2 -ir4ay4 -ird3i -ire3a4 -ir2e -ir3ec -ir5ee -irel4 -ire5li -ires4 -ir5e4ss -ir1i -ir2i4d -ir4im -ir4is. -5ir1iz -irl5i4ng -ir5o2ch -ir2oc -ir5ol -ir3om -ir4q -i2r2s2 -ir5ta -irt2 -ir5tee -irwo4m2e -ir1w -ir1wo -ir3w2om -i4sa -is5ad -is3age -isa4g -is1a2l -is3a4m -is1a2n -is3a2r -is5av -4i4s3b -i2s3c2 -is5chi -is2ch2 -isci5c -is1c2i2 -4i1sec -ise5cr2 -is3ell2 -4is3en -is2er1 -is5er2e -i2s3et -4iseu -is3ha2r -is2h -isha2 -ish5ee -ishe2 -4ish3io2 -ish3op -is5hor -2is2i1a2 -is5ic -is3ie2 -4isim -is3inc -i2sin -4is1is2 -is4ke2 -is2k2 -i2s1l4 -islun4 -2isma -i2s1m4 -is1on -is5on1er1 -is4o2ne -is2o5p -is1p -i3s2ph2 -5is1pr2 -2i4s1s -iss5ad -is1sa -is4sa2l -is5sa2n -is4s4iv -is1s4o -4ista -ist2 -is4tal -ist5enc -iste2 -ist5ent -is5ter3er1 -i4s2t2er2e4 -ister1 -4is1th -is4t3ic -4i4s2tl -i4s1to -4is4t2om -is1tr2 -3is2t4r2y -4is4ty -i5s2ul -is3u4r -2is2y -it1a -i2t5ab -ita4c -4i1t2ai2 -it3a2m -it4an2a -ita2n -it4as -it3at -i3te2ct -it3ee -it3enc -it3ent -it3era -iter1 -2ith -itha5l -itha4 -ith5i -i5thol -i1t2h3r -ith2y5 -2i1t2i2a2 -it2i4c2o2 -it1ic -it5ic1u -it1ie2 -it3ig -4i1tim -it4in. -it1in -i4t4i4n1s2 -4itio. -i1t2io2 -4itio2ne -i5t2i4q -4i5tit -i2t3iv -it4li -i2tl -it5lo -4i2to. -it5ol -2iton -it1o4u2 -2itr2 -it5re4s4s -itr2e -i4tric -2i2t1t4 -it4tit -it3ti -itu4a4ls -it2u1a2 -itu1al -it5ua2r -4itue -it1ul -it1u4r -it3us -2i1u2 -i3um -iur5e -iu4r -2iva -iv5anc -iva2n -iv1at -i4v2ed -iv5el. -i2vel -iv5el3i4ng -iv5e4ls -i4ver. -iver1 -iv3eri -i4vers. -ive2r1s2 -iver5sa2l -ives4 -iv3et -i4vie2 -iv3i1f -i5vil1it -ivi4l -ivil3i -5ivi4st. -i2v5ist2 -5ivi2s4t3s -iv1i2t -i2vo -iv2oc3 -i5v2or2e -2i1w -2ix -ix3o -i5ye -1iz -4iz2ah -iz1a1 -iz3i2 -2izo -iz5oi -2izz2 -1ja -2ja. -3jac -ja2c5o2 -jac3u -jag5u -jal4 -ja5lo -ja5pa2n -j2ap -j4apa -jel5l2a -jell2 -jeo2 -jeop3 -4jes -jeu4 -jew3 -2ji -3j2ig -jil4 -jill5 -5jis. -3jo2 -4jo. -jo1c5o2 -j2oc -joc5u -jol4e -4jr -4js -ju1di -j2ui4 -ju5l -ju3n2i -jun1 -ju2s1cu4 -j2usc2 -jut3a -ju1v2 -k4abi -k1ab -k2a5bu -ka2ch4 -k3a4g -kais5 -k2ai2 -ka4l -ka5lim -kal4is -k4a2n -k2a3o -k2ap4 -kar4i -ka2r -1kas. -kau4r4 -k2au2 -k2av4 -k1b4 -k1c -kc2om4 -k1c2o2 -k5d2 -k1do4 -kdol5 -4k2ed -ke5da -k5ede2 -3kee -ke4g -k2e4n4d -ken1o4 -ke2p5t2 -ker5a -ker1 -k4er2e -k5erel -k4er4j -ker5o -kes4i -ket5a -key4wo -key3w -k1f -kfu4r4 -k3fu -k3ho -5k2i1h -ki2l -kilo3 -k1in -k2in. -3k2i4nd -kinema4 -kin5et -k3i4ng -k2in4i -k2i4n1s2 -kir3m -k2ir -ki4r4r4 -kis4 -3kis. 
-k1is2h -ki2t5c2 -k2i4w -kk4 -k5ker1 -k2l2 -k3l2a -k5lea2 -k3ler1 -k3let -k3li -k3lo -k1m -kn2 -k2no -1kn4ow -k2o5a2 -kol4 -ko5m1i -k2om -ko5pe -k1p -k5ro4 -k3ru -4k1s -k3sl4 -ks2mi -k2s1m4 -ks4t2 -k1t -ku4r5 -k5v -k1w -3kyl -l2a -4la. -5la4a -lab5a2r -l1ab -l2aba -la1bel4 -l2ab2e4 -5lab1r4 -l4ac -la2c2a -la5ceo -la5cer1 -la4ch -la2c2o2 -5la5col -lac5on1 -la3cu -la4de -l5a2d1m -l4ae -l4af -la3ger1 -la4g4i4s -la1g2i -la2g3r2 -5l2ah4 -la4ic. -l2ai2 -la1ic -l4al -4l2ale -5laman3dr2 -l2a2m -lama4n5d -la1ma2n -la5mel1li -lam2e -lamell2 -lam4ie2 -lam1i -la1m1o -l5amu -lan3at -la2n -lan2a -la4n2d -3land. -land3i -3lan4d1s2 -lan4er1 -lan3et -lan5tine -lant2i -lant1in -lan4t4r2 -l2a4p -lap1i4 -lar5a2n -la2r -lar5de -4la2r4ed -lar2e -l4as -lat5al -l2a2ta -la4te -5latil1is -la4t1i4l -5latil1iz -5lat2in1i -la2t3in -lat5us -l2a2tu -l4au2 -5lau4r -lav5at -l2av -l4aw -4l2az2 -l3b -lb2e4 -l4bit -l4by -l1c2 -l2c1at -lce4 -l1cen4 -l4c2er2e -lcer1 -lch4e2 -l2ch -l3da2r -l3d2ed -l3de1h2 -l5dera -lder1 -ld3est2 -l5dew -ldi2 -l3die2 -ld4in4e -l2d1in -l5di5ne4s -ld3is2h -ldi4s1 -ld5li -l2dl4 -l3do -4le. -3leagu -lea2 -lea4g -le5a1t2io2 -leav5er1 -le2av -l3eb5ra -le1b -le2br4 -le3c2a -le5cha2 -le2ch -lect5ica -lect2i4c -le2ct -2l2ed -le5dr2 -leg1a -l3ega2n -3le2g1g -le4gin -le1g2i -leg3o -le3gra -le2gr2 -lek4 -4l4e4l2ed -l2ele -lel5o -le1lu5 -lem5enc -le1m2e4 -le1men -lem3is -l5em1iz -5le2m1m2 -l3e4m1n2 -le2mo -l4em5on -l5en2da -l2e4nd -len5da2r -lend4e2 -len4do -l4e1ne -le5n2ie2 -len1i -len3o -4len1t2io2 -lent2i -l4en5u -le3on -leo4s2 -le5q -2ler1 -le5rec -l2er2e -5l4er2i1a2 -l4eric -le5r2ig -ler3om -leros4 -ler3ot -4l4es. -le3s2c2o2 -le2sc2 -3le4s4s -1let -le5tra -letr2 -le5tr2e -5le5tu5 -leu4r5 -2lev -l3eva -5leve -lev5it2a -levi2t -le4wi2 -l5ex1a -1ley -lf5i2d -l2fo -lf3o2n -l1g2 -l4gal -l4gem -lg2i4a2 -l1g2i -l4gi2d -l4g2oi -l3h -4li. -li4an1i -l2i1a2 -lia2n -lias4 -lib1r4 -l1ic. -5l4i2ch -li4cie2 -li1c2i2 -5li5c2io2 -l3ic3on1 -l2i2c2o2 -lict4o -li2ct -l2i4cu -l3id1a -li2d -l4ida2r -5lid3i1f -3l4ieu -lie2 -l4i2fe -l2i1f -l4i3fo -lift5er1 -li2ft -1l2ig -li5ger1 -li1ge -light5i -li2gh -ligh2t -5l2i1h -3l2ik2 -1l4il -lil4i -li2m2b -limet4e -lim2e -li1me2t -lim4p -l4i2na. -lin1a -l4in2as -l2i4n4d -l4ine -5l4in3ea2 -lin4er. -lin1er1 -lin4e2r1s2 -lin4ger1 -l4inge -li4ng -l4in2g3i -5lingt -3l4in1gu -3l4in1q -lint5i -3li2o1g -l2io2 -l4i4ol -li2o3m -li3ot4 -li3o4u2 -5liph -l2ip -li2pt5 -l2i1q -3l2ir -l1is -l4is2k2 -5lisse -l2i4s1s -l1it -l2it. -l3it5a -5liter1 -3l2ith -5l2i1t2i2a2 -3l2itr2 -lit4u -l4iv -l5iv1at -l2iva -liv3er1 -liv5i2d -lkal5o -lka4l -lk5at -lk3er. -lker1 -lk3e2r1s2 -ll2 -l1l2a -ll2a4ba -ll1ab -lla2ct4 -ll4ac -l5l4as -l4l4aw -l5le1b -l1lec -l1leg -l3lei -l1lel -lle5m -l1len -l3lep -l3leu -l3lev -ll3f -l1li -lli5a2m -ll2i1a2 -lli4a2n -llib4e -llic4 -l4licl2 -ll2i5c2o2 -l5lie2 -llig1at4 -l1l2ig -l2lin -l5lin. -l3lin1a -l3l4ine -l5l2io2 -ll4i5v -ll3m2 -l1lo -lloc3a -l1l2oc -lloc5u -l1lo2q -l4lov -llow5er1 -ll3p -l4l3s -ll5t -l1lu -llun4 -l5ly1a2 -l2ly -l3ly1c -l3ly1g -l3ly1h -l3ly1i -l5lym2 -lm2 -l1ma -l1m2e -l4mer1 -lm3i4ng -l5m2i3p -l2m3od1 -l1mo -l1n4 -l3ne -lneo4 -2lo. -5load -l2oa2 -5l4ob3a -1l2oc -loc3al -loc2a -loc5ul -lo4cus. -lo1cus -2lo1cy -l3od1i4s2 -3lo3dr2 -1l2o1g -lo5ga2n -lo2ga -4loi. -l2oi -lo5m1i -l2om -lo2m4m2 -lon4al -lon1a -lo2n4e -l5onel -lo5ney -long5in -lo4ng -lon2g2i -3l4o1n2i1a2 -lon1i -lon2i4e2 -l3onis -l3on1iz -loom5er1 -l2oo -lo2o4m -loom2e -lop4e -5lo5pen -l3o2p1m -1lo1q -l4o2r4ed -l2or2e -lor5i3at -lor1i -lor2i1a2 -lor4i2fe -lor3i1f -lo5rof -loros4 -l4os. 
-lo1so -lo4ss4 -los5sie2 -lot5at -l2ot -loth4ie2 -l4oth -lo5tu -5lo2up -lo4u2 -lp1at -lp3er1 -lph2 -l5phe4 -l3ph1i4n -l2pho -l3p2ie2 -l3pit -lr4 -l3ri -l3ro -l5ru -4ls -l5s2a4m -ls5a2n -lsi4f2i1a2 -ls2i1f4 -lsi4m -ls4is2 -l5s2k2 -ls4p -l1s2t2 -lt4an3e -lta2n -l4ta4ng -lt5ant -l5ta2r -l1te -l4tei4 -lter2n3 -lter1 -lth3i -lt2i4c2i2 -lt1ic -ltim4a -l1tim -lt1in4 -lti3t -l3t4iv -lt4or -l1tr2 -ltram2ont5 -ltr2a2m -ltra3m4on -ltra1mo -l1tu -l4tus -4lu. -l2u1a2 -lu4ch4 -l4uc -lu2c5o2 -luc5ra -lucr2 -lu4cu -4lue -lu1en -lu5er1 -l2u1i2 -lu4it -lum4bri -lu2m1b -lumbr4 -lu4mo -5lum2p -lu2m5u -lunch5eo -lun1 -lun2ch -lunche2 -5lune -l3unta -lu3or1i -5l2up -3lur3o -lu4r -lus2k5 -lu4ss4 -lut5a2n -4lut5a2r -5lution1iz -lut2i -lu3t2io2 -lution1i -lu5t2oc -lut2o -lut5r2 -lu1v2 -lv5ate -l5vet4 -l4vi -l4vor -l1vo -l3w -lx4 -2ly -4ly. -ly1c -ly4ca -lyc4l2 -ly2c5os -ly1c2o2 -lym2 -lymph5 -lym1p -l2yp2 -ly4pa -lypt5o -ly2pt2 -3lyr -lys5er1 -l4y2s -3ly3w -3lyz -lz4 -4ma. -m4ac2a -mac3ad -ma5ch2in2e -ma2ch -mach1in -5mach2y -ma4cis -m2a1c2i2 -ma2ct4 -4mad. -4mada -4ma4d1s2 -ma4ge -5m4a2g1n2 -2mago4 -2m2ah -ma5ho -3m2a4i2 -4m4ai. -mai2d3 -5m2ak -mal3a4p -mal2a -mal5ar1i -mala2r -5m2ale2 -ma2l5ed -mal3e1f -m3al1g2 -m3al1is -m4al4is. -mal3le -mall2 -mal4li -2m2a2m -mament4 -ma3men -mam2e -m5ament. -1ma2n -3m4an. -man3a -man5da2r -ma4nd -man2da -man3dr2 -man3ic4 -man1i -man4ica -ma5n2il -m4a4n2s -man3te2l5 -man2te -2m2ap -m3aph -1ma2r -5mara2n -mar5ol -ma5ro4n -ma3r2oo4 -mar5ri -m2a4r1r4 -mar4shi -ma2r2s2 -mars2h -mar3v2 -ma3son -m2aso -massi4 -ma4ss2 -mass5i4ng -mas2s1in -3mas1t2 -ma4s4t4ed -maste2 -mast4ic -mas4t1in -m4at. -m4aten -m4a3ter1 -mater5n4 -m4at1it -mat4iti -m4atiz1a1 -mat1iz -m4a3t2o1g -ma2to -mat5om -ma3top -m4a4t1s -3m4a2t1t4 -ma5tur2e -m2a2tu -matu4r -m2av4 -2m1b -mba2t4t4 -m2b4d -m5bec -mb2e -m5ber3er1 -m2b2er2e -mber1 -m4be2r2y -m4be1s2 -mb2i -m2bic -m5b1il5 -m4b3i4ng -m4b2is -mb5ist2 -mbival5 -mb2iva -m5b2ler1 -m2bl2 -m3bli -mbru4 -mbr4 -mbu3l -mbu4r4 -m1c -m5d -m2e -2me. -mea5g -mea2 -me5a4nd -mea2n -me4ba -me1b -me4b2i -2m2ed -4med. -3me1d2i1a2 -med5ic1at -4medie2 -m5ed5ies -3med1it -me4do -m5e2d2y -me2g -5meg2a1 -meg1a5t -4m2ele -mel5ee -mel5ler1 -mell2 -mel3on -mel4t -melt5er1 -mel1te -me2m -4m5e1m2e4 -1men -3men. -2men1a -men4ag -mend5er1 -m2e4nd -mende2 -men1d5o -m4e1ne -ment5or -men2t2o -5men4t1s -5me2o1g -me4p -m5era2n -mer1 -4m2er2e -mer4i1a2 -2me2s -mes5en -me5s2i4a2 -mes5q -3me2sti4 -mest2 -1me2t -meta3t -met2a -met1e -4met4ed -meth4i -me2th1 -met1i4c -met5i1c2i2 -met3o -met3ri -metr2 -m1f -4m3h -4mi. -m1ic -m4i4cin -mi1c2i2 -m2i3c2o2 -3micro -m2i1cr2 -m4i2ct -mi3cul -m2icu -mi4cus -m4idi -mi2d -mi2d4in -mid5on -mi1do -mi5fi -m2i1f -mig5a -m2ig -migh5ti -mi2gh -migh2t -mi2gr2 -4mij -mi5ka -m2ik2 -m2il -m3i2l3a -mil4ad -4m5ilie2 -mil5ies -3mill2 -mi5lo -mil4t -3m2im -mim5i -5m2i4n4d -mind5er1 -minde2 -min4er. -min1er1 -min4e2r1s2 -m4ing5li -mi4ng -min2gl2 -min5ie2 -m2in1i -m4init -min3ol -m2ino -1m4int -minth5o -minth2 -m2i3o2 -m2i3p -mir1ab4 -m2ir -mir1a -mi5r2acu -mira4c -m2is. -m4i2s3c2 -mi4se -4misem -mis3ha2 -mis2h -5missi -m2i4s1s -m3i4st. -mist2 -mis4t1in -m3i2s4t3s -mi2t -m5ita2n -mit1a -4mity -3m2i1u2 -5m2ix -4m1l -mlo5cut2i -m1l2oc -mlun4 -2m1m2 -mma4n4d -m1ma2n -mmand5er1 -mmande2 -m3medi -mm2e -m2m2ed -mmel5li -mmell2 -mmet4e -m1me2t -mm2ig3 -mm2in3u -mmis3 -mmob3 -m1mo -m5m2oc -mmor3 -mmut3a -4m1n2 -mn2i1f4 -mn1i -m4nin -mn2i5o2 -mnis4 -mno5l -1mo -4mo. 
-2m2oc -mod1 -mod5ifie2 -mod2i1f -mogast4 -m2o1g -mo2ga -mo3ga4s -mo4go -mo2g5ri -mo1gr2 -m5o2ir -m2oi -mok4i -mol3a -4molog. -mo1lo -mo1l2o1g -4molo2g1s -4molo2gu -mo3ly -m2o1m -mo4mis -mom1i -m4on -mon1a4 -4m4o2n2ed -mo2ne -mo4n1g -mo4no -mono1lo4 -monolo3g5i -mono1l2o1g -m4op -mophil5i -mo5phi4l -m1o2p4t2 -m3or1ab -mor1a -m3orat4 -mor4a2to -m5ord -mo5rel -m2or2e -3mor2i1a2 -mor1i -m5or1iz -mor5on -3morp -3mor2se -mo2r1s2 -mor5tal -m2ort2 -m4o3sp -5most2 -m4o3sta -2m1ous -mo4u2 -m1p -m3pa -m4panc -mpa2n -m4pant -mp4a2th3 -mpel5li -mpell2 -m5per3er1 -m4p2er2e -mper1 -mp4er3i -mpet5it -m3pet -mpe2ti -mphal5o -mpha2 -m4phe4 -m4p4h2l -m2pi -mp5i2d -m5p2ig -mp3i2ly -mp2il -mp1in -m3p2ir -mp3is -m3po -mpol5it -mp2o2t -mpov5 -mp3to -m2pt2 -mp5tr2 -m3pu -m5q -m3r -m4r2y -4m1s -m5sel5f -m5si -ms2ol4 -mtu4 -muc4k4e -m4uc -muck1 -mu2ff4 -muf2 -mul1t2 -m5unc -mun1 -mu5n2io2 -mun2i -mun3is -mus5c2o2 -m2usc2 -mu4se -mus5ke2 -mus2k2 -mu3til -mut2i -m1v -m3w -2my -5my3c -my4d4 -my3e -3my1i -5mys1t4 -m4y2s -3myt -n1a -2na. -na2c -na2ch4 -na5cious. -n2a1c2i2 -nac2io2 -nacio4u2 -na5ciou2sl4 -nac4te -na2ct -nac5t2iva -nac2t1iv -na5cular1i -nacu1l2a -nacula2r -na4d4a -nadi4 -nad4op -na2do -n2ae. -naf1fi2l4 -na2ff -naf1fi -nag4a -n4a3gen -5n4a5geri -nager1 -na4g2i -n5ago -5n4a3gr2 -5n2ah -5nail -n2ai2 -na5iv -n2ak2 -4na2l2ed -n2ale -n5al1g2 -n4al2i1a2 -n2a3ly4 -1n2a2m -3nam2e -na4m4n2 -na5n2as -na2n -n1an2a -nann2ot4 -nan5no -nan1n4 -nan4t2a -nan5t4ed -nan2te -nan4t2o -n2a5o -4n4ard -na2r -nar5tisti -nart2 -nar2t1is -nartis2t2 -n2as -nas5i -nas5p -na4s3s2 -nas5te2 -nast2 -nat5al -n2a2ta -na5ta3t4 -n4atee -na3the2 -n4a2th -nat4h4l -nati4 -n4a4ti. -nat5i2c -n4a2to. -na2to -na3t2om -na4tos4 -n2a2t4r2 -na5tur1i -n2a2tu -natu4r -naugh5ti -n2au2 -nau2gh -naugh2t -naus3 -3naut -naut3i -n2a2v -na5vel -n3b4 -nbarric5 -nba2r -nb2a4r1r4 -nbar3ri -nbe2au4 -nb2e -n3bea2 -nbe4n -nb4e1ne4 -nbet4 -nbit4 -n1c2a -n4cal. -nc1al -ncarn5at -n1ca2r -ncar2n2 -ncarn1a -ncel4i -ncen4t5ri -n1cen2 -n3cent -ncen1tr2 -n4cept. -n1cep -nce2pt2 -n3cer1 -nc2er4e -n4c4es. -n5ce2t -n5cey -n3cha2 -n2ch -nch4ie2 -n3cho -nch5o1lo -n3chu -n4cic -n1c2i2 -ncid5en -nci2d -n4c2i3f -ncip5ie2 -nc2ip -n1c2l2 -n4cles -nc2le2 -n3c2oc4 -n1c2o2 -nco5pat -nco1pa -n1cr2 -n2c1t -nc4t1in -nct4ivi -nc2t1iv -nct2o -n1cu -ncu4lo -n4cun1 -n4curvi -n1cu4r -ncur1v2 -nc4us4t2 -n1cus -4nd -n2da -n3d2a4c -n3dal -n4d4ale -n3d2a2m -nd3anc -nda2n -nde2 -n3dea2 -nde3c2i2 -n3dec -n1d2ed -nde4l -ndeleg4 -nd2ele -nd3enc -ndepr2e4 -nde1p -nde1pr2 -n3derl -nder1 -nde4s -ndes5cr2 -n5de2sc2 -n5dez -nd4hi -n4d1h -n1dic -nd2ic5u -ndid5a -n1di2d -n3die2 -nd5i2ly -ndil4 -nd4in4e -n2d1in -nd3ise -ndi4s1 -nd5is4i -nd5ism. -ndi2s1m4 -n2d5ity -nd3ler1 -n2dl4 -nd1li -n5d2oc -n1do -nd2or4 -n2do4u2 -nd5ou4r -ndrag5 -ndr2 -n3dr2a2m4 -n5dron -ndu4b -n1du -nduc2t5iv -n3d4uc -ndu2ct -n4dun1 -nd2we -n2d1w -n3dy1i -n2d2y -2ne. -ne3alo -nea2 -n3ea2r -ne2b3u -ne1b -5neck1 -ne4cl2 -ne2c2o2 -n5ec1t2om -ne2ct -2n2ed -3nedi -ne4du4 -neg3a -ne3go -5negu -n4eis4 -2n2ele -ne5l2i1a2 -ne1l2i4g -n4e2ly -ne2mo -4n1en -n3e4nd -neo3l -neon4 -ne2p -n1er1 -4n4e2r4ed -n2er2e -5neri4ng -ner3in -ner5o -ne4r4r5 -ner2v2 -ner2v5in -2n4es. -n1e2sc2 -ne3s2i1a2 -1ne4ss -n1est2 -nes3t4r2 -net3a -net3ic -ne4t2o1g -net1r2 -neuma5to -n4eum -neu1ma -neut5r2 -nev5er1 -n4ew -news3 -n4eys. 
-ne4y3s2 -n3f -n1fo4 -nform5er1 -n5f2orm -nform2e -nfor1tu5 -nf2ort2 -nfra2n3 -n1fr2 -4ng -ng2a -n4gae -n5gee -n3geri -nger1 -n5gero -n2gh4 -n2g2i -n5gic -ngi2o4g -n3g2io2 -n5glem -n2gl2 -n3glie2 -n5gl2io2 -n2g1n2 -n1go -n4g2r2y -n1gr2 -n1gu -n2gum -n1h2 -nh1ab3 -nha2 -nho4 -nh2y2 -nhy1d5 -n1i -4ni. -3niac -n2i1a2 -ni3ba -n4ic1ab -ni4cen2 -n4ice -4nicl2 -nic2t5a -ni2ct -ni4cul4 -n2icu -ni4d2i1o2 -ni2d -n2ie2 -ni4e2r1s2 -ni1er1 -ni2f4f -n2i1f -ni2ft4 -nif5ti -n2i2g -night5i -ni2gh -nigh2t -n3i2g1m4 -3ni2gn2 -nik5e -n2ik2 -n2il -ni2l4a -n3im1 -n4im2e -5ni1me2t -n4ine4s -nin4j -5n2in1n4 -n4in2u -5n4i1ol -n2io2 -ni1o4u2 -3nipu -n2ip -5n2iq -n4is. -n4is2k2 -ni2s4l4 -nis4o -n5i4s1s -nis5ter. -nist2 -niste2 -nister1 -nis5te2r1s2 -nit2ch4 -ni2tc2 -ni4te -ni3tho -n2ith -n4itos -ni5tra -n2itr2 -nit5res -nitr2e -ni3tri -nit4u4r -n2iv -n2iv4a -ni3vo -niv2oc4 -niz5en -n1iz -n1j -nj2a2m2 -n1ja -njur5i -nju4r -4n2k -nk5a2r -n5ker5o -nker1 -n3key -nk5i1f -nk5i2l -4n1l2 -nla4n2d5 -nl2a -nla2n -n3le -n1let4 -n3m -nm4a3ter4 -nmor5ti -n1mo -nm2ort2 -n1n4 -nne4 -nnel5li -nnell2 -nnerv5a -nn1er1 -nner2v2 -n3n1i -nni3ki -nn2ik2 -nnov3 -n5ny1i -n2ny2 -4no. -n5ob2i -no5b1il -no2b4l2 -no5blem -nobser4 -no4b1s2 -n5ocu1l2a -n2oc -no4di -n4o2d2y -noe4c -no4f2a -nois5i -n2oi -n4ois -n5ol. -no3l2a -nol4i -no2m3al -n2om -no1ma -1nom1i -no2mo -4no2ne -3n2oni4c -non1i -5nood -n2oo -nop5i -nora4t -nor1a -nor5di -nor4i1a2 -nor1i -nor4is -nor3ma -n2orm -n4oro -n2or4t2 -n4os. -nos4o -n4o3sp -not1a -n2ot -3note -n1o4u2 -n4oug4 -3n2oun1 -2nous -nou5v2 -nova4l -nove2 -no2v3el -no4v2el5e -n4ow -now5er1 -now3l -n3p4 -np2il4 -n1pl2a4 -np2l2 -npoin4 -n1po -np2oi -npo5l2a -n3p4os4 -npri4 -n1pr2 -n1q -n4que1f -nqu4 -n1r -nre4i4 -nr2e -nre3m -nres5t4r2 -nrest2 -4n1s -ns2c2 -n2s2c2o2 -ns3c2ot -n4s1cu -n5sec -nsec4te -nse2ct -n2s2es -n5seu -n3s2h2 -n2si -n4s3ib -n4sic -n5s2ick1 -n3s4i2d -n3sie2 -ns5ifi -ns2i1f4 -ns3i4ng -n2sin -n3s2io4 -n3s2is2 -ns1i2t -n4s3iv -nsolu4 -ns2ol -n5son -n4s2or2e -n4s3o2r2y -n3sp2ir -n3s2t2 -nsta4 -nstil4 -n3su -nsur4e -nsu4r -n3swa -nsw2 -ntab4u -n2t1ab -nt3age -nt1al -n4t3anc -nta2n -nt5a4nd -ntan5eo -nt4ane -n4t3ant -nt4ar2i1u2 -nta2r -ntar1i -n5tas1is2 -ntas3i -nt3as4t2 -nt1at -nt5a2th -nt3ati -nt5a2t1iv -n5t2au2 -n1te -n4tec -n4tee. -n4tees -n3te2l -ntend5en -nt2e4nd -ntende2 -n4te2o -n4ter. -nter1 -n3teri -n5ter2n2 -ntern5al -ntern1a -nter5nat -nth2 -n1the2 -nt4her5 -nth5in2e -nth1in -nt2i -n2t4ib -n4t3ic. -nt1ic -n5ticis -nt2i1c2i2 -n5tic1iz -n4ti4c5s4 -nt2ic4u4 -n3ti2d4 -n1tie2 -n4tify. -n1t2i1f2 -nti3fy -n3t2ig -nt5ilati -nti2l3a -n5t4ill2 -nt3i4ng -nt1in -nt5ing. -n1t2i3p -n4ti1pa2r -n4tis. 
-n2t1is -nt3i2s1m4 -nt3is2t2 -n5ti1t2io2 -nt3iz -n2tj -n1t2o -n3t2om -nton1i4 -n5top -n1tr2 -ntr2a3d -nt3ral -n4tran2t -ntra2n -n3trat -nt5re4s4s -ntr2e -n2t3ril -ntrol5ler1 -ntrolle2 -ntroll2 -n5trym -n2t4r2y -n1tu -n3t2u1a2 -ntub5 -ntup5li -nt2up -ntup2l2 -n5tu4r -n2ty -n2u -n2u1a2 -5n4uc -3nud -nud5i -nu3en -nug4a -n2u3i2 -nu4is -5nuk -n4u1lo -n3ult -nul1tim5 -nu1m2e -5numenta -numen4t -nu1men -5numer1 -5nu1mi -3nunc -nun1 -nu3tat -n5u2t1iv -nut2i -nu4t2o -nu1tr2 -n3v2 -nve2 -n2vel3 -nv4e1n4e -nven5o -nver2s5a2n -nver1 -nve2r1s2 -nvi4t -nvoc5at -n1vo -nv2oc -nvoc2a -n5w -nwin4 -nwi2 -n3w2om4 -n1wo -n2x4 -2ny2 -5nyc -nym5i2t -nyth4 -n1z2 -nzy4 -2oa2 -o5ace -o3a2ct -oad5er1 -oad5i -o3ag -oak5er1 -o2ak -o3ales -o2ale -oal4i -oal5in -o5al1it -oan4t -oa2n -oap5i -o2ap -oar5er1 -oa2r -oar2e -oar4se -oa2r2s2 -oast5er1 -oast2 -oaste2 -o2a2t5a -oat5ee -o4at5er1 -4oba -o1be4l -ob2e -ob2i -ob3i4ng -2o1b2i3o2 -ob3it -o3bl2a -o2bl2 -ob1li -4obo2 -ob3oc -o5bol -o5b2ot -o3bra -obr4 -obr2om4 -o2b5t -ob3ul -o3bus -2oc -oc2a -o4c1ab -o3cad -oc5ag -o5cal1li -oc1al -ocall2 -o4c5a2t1iv -oc1at -oc5a2to -4o3ce2 -o4cea2 -ocen5o -o1cen2 -ocess4i -o5ce4ss -och4e2 -o2ch -och5in -o3ch2o4n -o1cho -ochro4n -o3c2hr -o5chu -oci3ab -o1c2i2 -oc2i1a2 -oci4al -o1cl2 -o2c2le2 -o1cr2 -oc2r2e3 -o2ct2 -oc2te -oc1to -ocu4lu -ocum4 -oc5uo -ocu4ss4 -o1cus -ocus5si -oc3ut5r2 -o1cy -o5cy4t -ocyt5o -od3al. -o3de4c -o5de3g -ode4ga -o5d2e4n4d -o3dent -odes4 -od3ica -o4d1ie2 -od3iga -od2ig -od4il4 -od1i4s2 -o3d2is5i1a2 -od5it -5od1iz -od3li -o2dl4 -o2do -od5o1lo -o2d5ous -odo4u2 -o3dro -odr2 -od5ru -o2du -odu5cer1 -o3d4uc -o4duct. -odu2ct -o4duc4t1s -od3ul -o5dyt -o2d2y -oe3a2 -oe4b2i -oe1b -oe5cu -o2e4d -o5ee -oe5ic -o3elec -o2ele -oel1li4 -oell2 -oelo4 -oe3o4p -oep5 -o5eq -o3er1 -oes3t2 -o1et -o4et. -oet3i -oet4r2 -3oeu -o3ev -o3ex -o3flu4 -ofl2 -4o1fo -o4ful -o3fu -ofun4 -2o1g -o2ga -o3g2a2m -og5a2r5 -o3ga4s -o3gen1 -o5gey -o3g2i -o4g2io2 -og2n1a -o2gn2 -ogon1i4 -o4g2ot -o2gri -o1gr2 -o4g4ro -og4s2h -o2g1s -o2gu -o5gyr -o2gy -o1h2 -o3ha2 -oh1ab3 -o3he2 -oher4er1 -oher1 -oh2er2e -o3ho4 -oh2y4 -2oi -oi4c -o3ic. -o4i5ch -o2i4d -4oide -o2ig4 -oi5ki5 -o2ik2 -oi2l3er1 -oil5i -oin3de2 -o2i4nd -o3i4ng -oin4t5er1 -oin1te -oin4tr2 -o2i4o2 -4ois -o3i2s1m4 -oi4t -oit4al -oit1a -o2ith4 -o1j -ok4ine -ok1in -ok3l2 -ok5u -ol4a4c -ol2a -o4lack1 -o5lali -ol4al -ol4a2n -ola4n5d -ol5ast2 -ol4as -ol4at5er1 -ola4te -ol5ch -ol1c2 -ole2c4 -ol5e1c2i2 -ol5efi -ole1f -o3leo -ole4on -o3lep -o2l1er1 -o3lest2 -o3leu -o1l2i1a2 -ol3ica -o3l4ice -ol5ic1iz -oli1c2i2 -ol5ick1 -ol3i4c5s4 -ol5id. -oli2d -oli2e2 -o3li1er1 -ol5i4es. -o5l2i1f -ol4i4f3e -oli5go -o1l2ig -o5lin1a -ol3i4ng -oli5os -ol2io2 -ol5ip4 -ol1is4 -ol2it -olle2 -oll2 -ollim3 -ol1li -ol4lop4e -ol1lo -ol4ly1i -ol2ly -ol3mi -olm2 -o1lo -4o1l2oc -ol3o2i4d -ol2oi -o4lon1a -ol5on5el -olo2n4e -ol1or -o3los -ol1o4u2 -4ol1ub -o3lu1mi -o5lun1te -olun1 -ol3us. -oly3ph -o2ly -ol2yp2 -4olyt -2om -o1ma -o4m1ab -o2mac -o2mal -o4mane -o1ma2n -o3mas1t4 -o3mat -om4b2e -o2m1b -o2m2e4d -om2e -ome4g -o5meg5a1 -ome3li -o2m3en1a -o1men -omen4t -o3meri -omer1 -om1i -o3m2i1a2 -om1i2c -om2i1c5r2 -om4ie. -omie2 -om2il4 -om4i2ny2 -om2i4s1s4 -om2i2t -omm2e4 -o2m1m2 -om2n1a -o4m1n2 -omn1i3 -o4m2oi -o1mo -omoli3 -o2m4o4n -om5o2ny2 -o4mos. -omo2t5iv -om2ot -o2mo4u2 -om5p2il -om1p -om2pi -ompt5er1 -om2pt2 -ona4d -on1a -on3ai2 -o5nas. 
-on2as -onast5i -onast2 -on5a2t1iv -onati4 -4on2au2 -on1c -onc1at3 -on1c2a -on4cho -on2ch -5ond5a2r -o4nd -on2da -ond5ent -onde2 -on3der1 -on3dr2 -on5d2y -o2ne -4onea2 -onec4r2 -4o2n2ed -on1ee -on5ell2 -o3neo -o1n3e4ss -on1et -ong3at -o4ng -ong2a -on4gu -4on1h2 -4o1n2i1a2 -on1i -on5ia2r -2oni4c -onic5a -oni4c1al4 -on4i2d -on3ies -on2ie2 -on3i1f -o5n2i2g -o1n2io2 -on4k4s -o4n2k -4onnes -on1n4 -onne4 -on5o4di -on5oi -o1no4m1i -on2om -4o5nom1i2c -ono3s -o5not1a -on2ot -o4n1s2 -2ont -ont5a2ne. -ont4ane -onta2n -on4ter1 -on1te -onti5fi -ont2i -on1t2i1f2 -onton5 -on1t2o -on1t4r2 -on4tr2e -on5u4r -on2u -o5nus -onvo5lu -on3v2 -on1vo -onvo2l -on2z2 -2oo -oof3er1 -o2o1i -ook3er1 -ook3i -oo4le -ool5i2e2 -o2o4m -oon3i -oo2p -oop4ie2 -oop1i -o3o2p1t2 -oo4se -oost5er1 -oost2 -ooste2 -o2o2t -oot3er1 -ooz5er1 -o2oz -o1pa -o4p1ab -o5pali -opa5ra -o1pa2r -op4a2th5 -o5pec -ope4n2s4 -op1er1 -3opera -4opera4g -o1pha2 -o4phe4 -oph4ie2 -o5phi4l -op5hol -o1pho -o1ph2y -ophy5l2a -o3phy2l -op1i -op3ies -op2ie2 -op5i4ng -o3p2it -4op2l2 -oplast4 -o1pl2a -opl4as -o4p2oi -o1po -opol3i -op4o2n4 -o2p5o2ny2 -op5or1i -op2oun4 -opo4u2 -o2p5ov -op2p2l2 -o4p1p2 -op5pli -oprac4 -o1pr2 -op3ra2n -opr2e4 -opro4l -op5r2op -op5so -o2ps2 -1o2p1t2 -op2ta -op1u -o5qui3al -oqu4 -oq2ui2 -oqu2i1a2 -or1a -or5a2do -or2ad -ora4g -o5r2ai2 -or5al -4ora4ls -or2a2m4 -oran3e -ora2n -orat1or5 -ora2to -or2b3in -or1b -orb2i -or4ch -or1c2 -orch3i -or4du -2or2e -or5ead -orea4 -ore5a2r -ore5c2a -ore3f -ore3g -or3ei4 -oreo5l -or3e2sc2 -ore3s2h -or3e4ss -orest5at -orest2 -or5este2 -or5e2t2t4 -ore4v -5orex -or4fr2 -or1f -or5gn2 -or1g2 -or1i -4o4ri. -or3ia. -or2i1a2 -4oria2n -ori4c2i2 -ori5ci2d -or2i1en4 -orie2 -or3i1f -5or2ig -ori5ga -or4i4no -4orio. -or2i3o2 -or5ion -4orios -ork5a -or2k -2orm -orm1i -or3n4a -or2n2 -5or1nis -orn1i -or3nit -or3o2ne -o5r2oo4 -or5ose -or5oso -or1o4u2 -orre2l3 -o4r1r4 -orr2e -orres3 -or4sc2 -o2r1s2 -or4sey -or2se -or4sti -orst2 -2ort2 -ort3a2n -ort3at -ort3er1 -or5t4es. -or2t2es -or3th2i -or4t2h2r -or4tit -or2t3iz -or4t1or -or1t4o -or5tra -ortr2 -ort3r2e -4or1u -or4un1 -or2y5p -o2r2y -o3s2a5i2 -os3a2l -osa2r5 -o1sc2 -os4ca -os4ce -o2s2ch2 -o4s1c2i2 -osclero5s4 -oscl2 -osc2le2 -osc2ler1 -o3sec -osec3u -ose5g -os5enc -o3se4n2s4 -os5eo -oser4 -o2set -os5eu -o3s2i1a2 -osi4al -osi4a2n -os5i4de -os4i2d -o3si1er1 -osie2 -os5i1f4 -o2s1in -o4s1is2 -o5ske2 -os2k2 -o5son -o3soph -os2o2p -os3o1po -4osp -o3spec -os1pi -os4sa -o4ss -oss5a2r -os4s2it -4osta -ost2 -ost5age -os4ta2r -os5tee -oste2 -os5ten -osten5t -ost5ica -ost1ic -os3til -o5stom2e -ost2om -ost3or -4osu -os1u4r -2ot -ot3a4g -o5tal1it -ot3a2m -ot4an3ic -ota2n -otan1i -o3t2ap -ot4a1t2io2 -o5t2a5v -o3t4a4x -o4t4ed -oter4m -oter1 -ot5est3a -o2t2es -otes4t2 -4oth -othal2a2m5 -otha4 -othal2a -oth5er3in -ot4her1 -othe2 -o5therm -otherm5a -o5thor -o5t2i2a2 -o5t4ill2 -5ot5in1iz -ot1in -ot2in1i -o2t4iv -o3t2iva -o5tivi -o1t2o -o5to2n4e -o4tor2n2 -ot1or -o4to4u2 -4o1tr2 -otur1i4 -otu4r -oty3le2 -o4u2 -5o2u3a2 -oub2 -ou5br4 -ou5c2a -o4uc -ou5c2o2 -oud5i -4oue -ou3e4t -oug4 -ou5ga -ought5i -ou2gh -ough2t -ou5g2i -oul4t -oult5i -ou3m -2oun1 -ou4n2d -oun2d5a -ound5e4l -ounde2 -oun5gin -ou4ng -oun2g2i -oun3tr2 -oup5li -o2up -oup2l2 -our3er1 -ou4r -our2e -ou5sa2n -2ouse -5ous2i1a2 -ou4ss4 -out5is2h -out2i -ou2t1is -ou4v5a -ouv2 -o1v2a3le -o5va2r -4ovati -ov5el3i4ng -o2vel -o4ver. 
-over1 -o5ver3b -ove2r3s2 -ov4ete -ovid5en -ovi2d -o1vis -ovis5o -o2v5os -o1vo -ow3ag -ow3a2n -o5w4ay -owd4i -ow2d3l4 -ow1el -owel5li -owell2 -ow5ha2 -owh2 -owh2ith4 -ow1i2 -ow5in -owi5n2e -ows4 -ow5s2h -ow5sl4 -ow5y2 -o4x -ox3i -oxic5ol -oxi4c -ox2i2c2o2 -ox5o -2oy -oy5a2 -o4y2s4 -2oz -o1zo -oz2o5i -o3zy1g -4pa. -pac4te -pa2ct -pa5do4u2 -pa2do -pad4r2 -pae4s4 -pa3ga2n -4pag1at -pag4ati -pain2 -p2ai2 -4pairm -pa2ir -pa5la2n -pal2a -pal3in -pa3lo -p4a4ls -pan5a2c -pa2n -pan2a -pan1e -pan3i -p4a4pa -p2ap -pa3pe -pap3u -pa3p4y -1pa2r -para5s2 -par3l -pa3r2oc -pa3rol -par5o4n -1p4as -pass5ive -pa4ss2 -pas4s1iv -pas1t2 -pas4t1in -p4a3ter1 -pa2t3i4n -p5a2to -pat4ric -p2a2tr2 -pat1ri -pa5tric2i1a2 -patri1c2i2 -5p2au2 -paul5e -pa2u3p -pa5vi4l -p2av -5paw -pawk4 -paw5ki -2p1b -p1c4 -p5d2 -2pe. -pear4l5i -pea2 -pea2r -pe4c2o2 -pec4tu -pe2ct -2p2ed -5ped3a -3pede2 -3pedi -ped3i4s1 -3pe4d1s2 -pe2du -p4ee -pe2f -4p2ele -pe5le3o -pel5v4 -pen4at -pen1a -5p4enc -pend5er1 -p2e4nd -pende2 -pen5dr2 -pen4ic -pen1i -3p4en1n4 -pens5ati -pe4n2s -pens2a1t -p4en5u -pe5on -5p2er1c2 -per1 -per3cent5 -per1cen2 -4p2er2e -perem5i -p4eri -5p4er3n2 -p3eron -per4os. -per5t1in -pert2 -per2t5is -per3v2 -p4e2r2y -2pes -pe4s4s3 -pes5til -pe2sti -pest2 -3pet -pet5all2 -pet2a -pet3en -pe2ti -pet3r2 -pe4wa -4pex -p1f -p5g -2ph. -4phae -pha2 -pha5g2e4d5 -ph5al. -ph2a2n -phant5i -phan4t -phe4 -ph5esi -ph3et1 -3phib -4ph1ic -1phi4l -ph1i4n -ph1is -ph2i5th -p4h2l -1pho -4pho2bl2 -4ph4o2n2ed -ph2o4n -pho2ne -3phor -ph5or1iz -phor1i -ph4os3p -ph3o4u2 -3phra -p2hr -4p2h1s -1phu -ph2u5i2 -2phy. -ph2y -3phy2l -4pi. -3pia2r -p2i1a2 -4pica -p5i4c1al -p2i3c2o2 -p2i4cr2 -pi2ct4 -p2ie2 -p4i1es2t2 -pi5eti -p2i1et -p5ifie2 -p2i1f -pi2g3n2 -p2ig -p2il -3pile -pil2l5in -pill2 -pil1li -5pilo -pi3l2ot -pim2 -pin4e -pin5et -3p4inge -pi4ng -p4in1n4 -5p4i4n1s2 -3p2i1o2 -pip4a -p2ip -pi4pe -5p2iq -pir5a4c -p2ir -pir1a -pir4t2 -p4is. -p4i2s3c2 -p2i4s2s -pis1s5a -pis5til -pist2 -pis4tr2 -p2itu -2p3k2 -p2l2 -1pl2a -pla5n1o -pla2n -plant5er1 -plan2te -plas5t2i1c2i2 -pl4as -plast2 -plast1ic -pla5t4o -4p4le. -4pled. -p2l2ed -3pleg -3plen -2ples -4pli2s1m4 -pl1is -4plist2 -plu2m -plum4b2e -plu2m1b -plumb5er1 -p4ly -2p1m -2pn -pnos4 -1po -4po. -po3c2a -p2oc -3pod -4pof -2p5o2i4d -p2oi -pois5i -p4ois -po5lem1ic -po4ly1 -pol4y3s -po1ma2n5 -p2om -po1ma -pom4e -p4o2n -pon4a2c -pon1a -pon4ce -pon1c -pon4i4e2 -pon1i -3pon3i1f -pon5ta -p2ont -2po2ny2 -po4pa -po5ple -p4op2l2 -4pora2to -por1a -por3ea4 -p2or2e -4po2r4ed -por3i4f -por1i -por3p -3p2ort2 -por5tie2 -3p4os -po4s1s2 -po1te -p2ot -poul1t5e -po4u2 -poul4t -pound5er1 -p2oun1 -pou4n2d -pounde2 -pout5er1 -p5ox3i -po4x -5p2oy -4p1p2 -p1pa2r3 -ppar1at5 -p4p4ene -p3pet3 -pph4 -ppi4c -p4p2l2ed -pp2l2 -p5p2ler1 -p5p1let -ppres2s5o -p1pr2 -ppr2e -ppre4ss -ppr4ob5a -1pr2 -prac1 -pr2a5d -pra2r4 -4p4re. -pr2e -pre1b3 -p2r4e1d -pr2ef5er2e -pre2fe -pre1f -prefer1 -prel5a4te -pre1l2a -3prem -pre5mat -pren3 -pres3a -pre5scin -pre2sc2 -pres1c2i2 -p3rese -5pressi -pre4ss -5pri1c2i2 -pri4es -prie2 -4pri4m -pring5er1 -pr4inge -pri4ng -pr4in2g5i -4pr2i3o2 -p5r4i1ol -pri4os -pri2s5in -pr2i4v2 -4pr2iva -4p4ro. -pr4o3bo2 -p3roc3a -pr2oc -pro4ch -pro1l -pron4a -pro4ph5e4 -pr2op -pro3pyl5 -pro2p4y -pro3r2 -pros4i -pros5tr2 -prost2 -pr4o3th -pr2ot -4p2r2y -2ps2 -p3sac -ps4al5t -psa2l -p3s2h -p1si -p5sin. 
-p2sin -p1s2o3m -p1st2 -psul3i -p1s2ul -3psy1c -ps2y -2pt2 -p2t3ab -p4tad -p4ta2n -p2ta2r -pt5ar1c2 -p1t4ed -p5ten1a -pt5en1n4 -5pte2r2y -pter1 -p5tet -pt4ic -p5tie2 -p3til -p2t3in -pt4ine -p3tise -p2t1is -p5tisi -p5t2om -p4tr2 -p1tu -pub1 -pu5b2e -p4uc4 -pu4ch4 -pudi4c -pu5er1 -puff5er1 -puf2 -pu2ff -puf1fe -pu4la2r -pu1l2a -pu5lar. -pu5l1is -pul2i -p4u4m -pu1m4o -p4un1 -pu4n4a4 -3punc -pun5g2i -pu4ng -pun3i -pun2t -pu3pi -p2up -pur5b -pu4r -pur3c2 -p4us -push4ie2 -pus2h -pu3tat -p5u5t1is -put2i -pu3tr2 -4p1w -2p4y -py3e -3py1g -3pyl -pyr3e -py5t -4qf -qu4 -5qu2ak -q2ua2 -4qua2r -qua5t2io2 -2que. -3quer3a -quer1 -4qu2er2e -4qu4es. -1que4t -5quin1a -q2ui2 -5qu2ir -3quito -4quitu -4ra. -r2a3ba -r1ab -5r2ab2e4 -3r2a3bin -r2abo2 -ra3bol -rac4a -r2acu -rac5u1l2a -ra5cu1lo -r2ad -ra4de -rad4in4e -ra2d1in -ra2g5o4u2 -ra3gr2 -3raill2 -r2ai2 -ra5ist2 -4ral2i1a2 -r2a3ly4 -r5a4m1n2 -r2a2m -ra3mu -r4andi -ra2n -ra4nd -ran5dis2h -randi4s1 -ran4du -ra5nee -ran4gen -ra4ng -ra3n2i1a2 -ran1i -ra3n2oi -ran1o -ran2t -ran5t4ed -ran2te -5ran3te2l -rant5in -rant2i -ran1t5o -rapol5 -r2ap -ra1po -rap5to -ra2pt2 -4rar1c2 -ra2r -rar2e2 -rar3e1f -rar5ia. -rar1i -rar2i1a2 -ras2 -r2as3c2 -r2as2e -r4as2k2 -r2a3so -ras1s5a -ra4ss2 -ras2s5in -r4as5te2 -rast2 -ra5t2a3p -r2a2ta -ra5ta3t4 -rat5eu -rath4e2 -r4a2th -ra2t3i1f2 -rat4in. -ra2t3in -ra5t2oc -ra2to -5r2a5tol -4r4at2om -ra4tos4 -ra5t2u1i2 -r2a2tu -rat5u4m -rat3u4r -rav5ai2 -r2av -rav5eli -ra2vel -rav3i2t -rawn4 -ra3z2ie2 -r2az2 -raz1i -r1b -r2ba -r4bag -rb3ali -rb1a2n -rba2r3 -r2b2e -rbe5c -r3bel -rbel5o -rb3ent -r4be1s2 -rb2i -rbic4 -rb2ic5u -r2bin -r5bine -rbit1 -r2bos -rbo2 -r4bum -rbu5t4 -r1c2 -rcant5 -rca2n -rca4s -r4c2ele -rce2n5er1 -r1cen2 -rc4ene -rcen5ten1a -r3cent -rcen1te -r2ces -r3ch2a3i2 -r2ch -rcha2 -rch3al -rch5ar4d -rcha2r -rch5ate -r3cheo -rche2 -r4ch1er1 -rch4i1er1 -rchie2 -r4ch1in -rch3is -r3chit -r3cil4 -r1c2i2 -rci5n2o1g -rc2ino -rcis2 -rciz4i2 -rc1iz -r2cl2 -r4c2le2 -r5clo -rco1lo4 -r1c2o2 -rcrit5 -rcr2 -rcriti4 -r2ct4 -rc5ti -r5d2a2m -r4d1a2n4 -rd4an. -r2da2r -r5de4l -r3de4n2s -r4des -rd5e4ss -rd5ia2n -r1d2i1a2 -r4die2 -r5d2ig -r2d2in -rd3i4ng -r3d2i3o2 -rd1i4s2 -rd5ler1 -r2dl4 -rd3li -r4dol -r1do -r2d5ous -rdo4u2 -r2e -4re. -rea4 -r4ea. -reac2t5iv -re1a2ct -re3af -re3a4g -re5alt -re5a2m1b -re2a2m -re3an5i -rea2n -re5ant -re5asc2 -re2as3o -r5e2au3 -3re2av -r5ebrate -re1b -re2br4 -reb1ra -re4b5uc -re3c1al -rec2a -rec4ce -rec1c4 -re3ce -reced5en -re2c2ed -re3cede2 -re3cha2 -re2ch -reci5si4 -re1c2i2 -r4e1c2r2 -rec4t3r2 -re2ct -re3cu -2r4ed -re1de2 -re3di4s1 -re4dol -re1do -re1dr2 -reed5i -re2ed -ree3m -3reer1 -re2fe -re1f -re3fin -re5gali -re5gra -re2gr2 -r2e3g4r2e -reg3ri -re3g4ro -reg3ul -rei4 -r4e3i1f -re1in -r4e3is -reit3 -reit4i -re1l2a -r2e1le -4r4e4l2ed -re3l2i1a2 -rel3ic -re5l2ig -rel2i4q -rel3li -rell2 -r5em. -rem5ac -rema4n4d -re1ma2n -rem5a2to -r3em1p -rem5ul -ren1a4 -ren5at -r4endi -r2e4nd -r4ene2 -ren4es -r4en1i -ren3ic5 -ren4it -ren4ter1 -ren1te -re5num -r4en2u -re3oc -3re2o1g -re5ol2a -reo2l -re3oli -3reo1s2 -re1pe -re4per1 -re5ph1 -rep5i2d -re3pin -re3ple -rep2l2 -r2e4pr2e -re1pr2 -re1q -rer4a -rer1 -r2er2e4 -re5rea4 -r2e3r2u -2r4es. 
-re3scr2 -re2sc2 -re3se4l -re3sem -re3ser1 -res5ist2 -res1is2 -re5s1it -re3spe -r3esq -re5stal -rest2 -rest5er1 -reste2 -re5s1tu -3reta2r -ret2a -re3ten -re4t4er3 -re5term -re1t2o -re5ton -re3tra -retr2 -r2e3tr2e -re5t1ri -re3tu -re3un1 -reu4r4 -re1v -re2v3el -revi4t -r1f -rf4l2 -rfu4m -r3fu -r1g2 -r4gag -rgal4 -r2ge -r5gee -r4g4ene -r3gen -r3ge4o -r3ger1 -rg5li -r2gl2 -rgu5f2 -rh2 -r5hel4 -rhe2 -rhe5o2l -rhos4 -3r2h2y -4ri. -ri3a2m -r2i1a2 -ri5ap -2r2ib -r4i3bo2 -ric2a5t4u -ric1at -2r4ice -rich5om -r4i2ch -ri1cho -rick4en -r2ick1 -ric4ke -r4icl2 -ri5cli -ri3col -r2i2c2o2 -ri5cor -ri4cra -r2i1cr2 -2r2icu -rid4al -ri2d -rid1a -rid4e -ri5el -rie2 -ri3er1 -ri2es -rift5er1 -r2i1f -ri2ft -rif5tie2 -5rifug4a -ri3fu -ri5g2a2m -r2ig -rig5ant -riga2n -ri5l4a -r4ile -rill5er. -rill2 -ril2ler1 -rill5in2g1s -ril1li -ril2lin -rilli4ng -4rim. -ri2ma -rim2a4g -ri1m5a2n4 -rim3at -r4i2m1b -ri1men4 -rim2e -4ri2m1m2 -4ri4m1s -rin4e -r4inet -ring5ie2 -r4in2g2i -ri4ng -rink5er1 -r4i4n2k -r4ino -ri4n4s2 -rin3s5i -rin4t5er1 -rin1te -r2i3o2 -ri2o4g -5rio2ne -ri4op -ri5or -ri5p2a -r2ip -ri5p2ie2 -rip5lica -rip2l2 -r2i5r -ri2s4c2 -r4is4is2 -r2is1p -ris4pa -ris4pe -ris5ter1 -rist2 -riste2 -4risti -r2i3ton -r5it5r2 -r2i4v -riv4al -r2iva -ri5vall2 -riv5eli -ri2vel -riv3en -riv3i4l -5r2i5zo -r1iz -r1j -r2k -r5kas -rk5ati -r5kell2 -rk5en1i -rk1er1 -r3ket -r3key -r3ki1er1 -rkie2 -r5ki1es2t2 -r5k2in. -rk1in -r5k2i4n1s2 -rks4m2e -r4k1s -rk2s1m4 -r1l2a -rlat3 -r1le -r3l4ic -r3l4ine -r5li4n1s2 -r4l1it -r1lo -r3mac -rma5ce -r5mad -r2mal -r4manc -r1ma2n -r4man1o -r4mar1i -r1ma2r -r4ma2r2y -rm4as -r4m3ati -rma5t2oc -rma2to -r5m2a5tol -rme2a2 -rm2e -r2m1ic -rm4ica -r5m2i2d -rm4ie2 -r5m2ig -rm2il5 -rmin4e -rm3i4ng -r4ming. -r4mi4te. -rmi2t -r3m2oc -r1mo -rmol4 -r1mu -rmu3l2i -r2n2 -rn3ab -rn1a -r3na2c -r5nad -rn5a2r -rn3ate -rn5a2t3in -rnati4 -rn5e2dl4 -r2n2ed -r3nel -r3ne4ss -rn5est2 -r3net -r3ney -r5n2i1a2 -rn1i -rn5ib -r3nic -rn3in -rn4ine -r1nis -rn3ist2 -rn2i5v -rn3iz -rn5n4 -r3n2oc -r5n2o1g -rnt4 -r5n4uc4 -rn2u -r5nut -4ro. -ro4b2e -rob3le -ro2bl2 -ro5br4 -5roc1c4 -r2oc -ro3cu -r2od -ro3do -ro3dy4n1 -ro2d2y -ro1fe -ro3gn2 -r2o1g -4r2oi -ro3i4c -ro2i4d3 -ro3l2a -r4o2l2ed -rol5ite -rol2it -ro3ly -romant4 -r2om -ro1ma -ro1ma2n -ro5mel -rom2e -ro3m2i2t -rom1i -romole2c5 -ro1mo -rom4p -ro3mu -ron4a2c -ron1a -4ronal -ro5nate -ron5ch -ron1c -ron4do -ro4nd -ron2g5i -ro4ng -r5onm2e -ron3m -ro1no -ron4ton5 -r2ont -ron1t2o -r2oo4 -1ro2o4m -5ro2o2t -r2op -4rop. -ro3pel -rop4in4e -rop1i -r4o1pr2 -r5opte -r1o2p1t2 -ror5d -4r2or2e -r4osa -ro3s2i4a2 -ro5s2ol -4ro4ss -ro5stat -r4osta -rost2 -ros4ti -ros5tit -ro3tat -r2ot -ro1te -ro4ter1 -ro3tu -5r4oue -ro4u2 -roul3 -round5er1 -r2oun1 -rou4n2d -rounde2 -rou5sel -r2ouse -4rou4ss4 -r4out -r4ow -row3er1 -4ro4x -rpas2s5in -r1p4as -rpa4ss2 -rp3at -rpe2 -r3pent -rp5er. -rper1 -r2ph -rph5e4 -r3phol -r1pho -rp3i4ng -rp5is -rpol3a -r1po -r2p5o4u2 -rpr2e4 -r1pr2 -rpre4t5er3 -r3pu -r1q -4r1r4 -rr2a4h -rran5g2i -rra2n -rra4ng -rr2ap4 -rre2l -rr2e -r4reo4 -rrhe3 -rrh2 -r3ri -rric4 -r2r2icu4 -rri4fy. -rr2i1f -rri3fy -rr4in5ge -rri4ng -rri4os -rr2i3o2 -rrob3 -rr2o1g5 -rr2o4t -r5ru -r2r2y5 -r3ry1i -r3rym -2r1s2 -r4sa4g -r2sa2l -r5sal1is -r5sal1iz -r2sa2n -r4sa2r -r2se -r3se2a2 -r3sec -rsel4 -rsell5 -rs3er. 
-rser1 -2r1s3e2r1s2 -r3set -r3sha2 -rs2h -r3shi -r4shie2 -r5s2i2a2 -r4s3ib -r5sie2 -r4sil -rs3i4ng -r2sin -r3s2io4 -r4s1it -r4s3iv -rs5li -r2sl4 -rst1or4 -rst2 -rstrat4 -rstr2 -r3su -r4sus -rswea2r4 -rsw2 -rswea2 -rt2 -r2t3ab -rta4g -rt3age -r3ta2r -r4tar2e -r2t3c2 -r1t4ed -r4te2dl4 -r3te2l4 -r5t2e4nd -rt3en1i -r5ter3er1 -r2t2er2e4 -rter1 -r5tet -r5teu -r4th4ene -rthe2 -rth2i -rth5ing. -rth1in -rthi4ng -rth3ri4 -r1t2h2r -r1t4ic -r4ticl2 -r5t2i1et -rtie2 -r5ti2l3a -r5t4ill2 -rtil5le -rt5i2ly -r2t1in -r3tin1a -rt3i4ng -r3titi -rti5tu -r2t3iv -r2t1iz -rt5let -r2tl -rt3li -r1t4o -rto5l -rt5ri2d -rtr2 -rt5s2i -r4t1s -r1tu -r4tus -rtwis4 -rt1w -rt2wi2 -r2u3a2 -r4ub2e -rub3r4 -ru4ce -r4uc -r2ud -rue4l -r4uf2 -ru3in -r2ui2 -ruis5i -ru2l -r4um2e -r4u1mi -ru4m2or2e -ru1mo -run4c2l2 -run1 -run1cu4 -runcul5 -ru4n2d4 -run2e -ru5net -ru4n4g -run4t -r2u2p -rup5lic -rup2l2 -ru3pu -rur4i -ru4r -rus4p -rust5at -r4ust2 -rust5ee -ruste2 -rus5t4ic -rus4t5u -ru3tal -ru3t2i -r1v2 -r4vanc -rva2n -r2ve -rvel4i -r2vel -r3ven -rv4e1n4e -rv5er. -rver1 -rv5ers. -rve2r1s2 -r3vest2 -r3vet -r3vey -rvi4t -r1w -2r2y -ry5er1 -5ry2g1m4 -ry1g -ry4go -ry2m4b -3ryn1go -ryn1 -ry4ng4 -4ryn4gol -ryp5a -r2yp -ry2t -ryth4i -r2z -2sa. -2s1ab -s3a2bl2 -5sack1 -sac4q -s3a2ct -sac4te -sad5i -sa2d5o -5sae -sa4g -3s2ai2 -sain4t -5s2ak -sa2l -sa5l4ac -sal2a -3s2ale -sa3lie2 -s4al4t -sa3lu -s2a4m -sa5min -sam1i -sa1m5o -sam2p4 -san3a -sa2n -san4d2ed -sa4nd -sande2 -s4an4e -san5ga2r -sa4ng -sang2a -san5i3f -san1i -2sant -sant5ri -san1t4r2 -s3ap -sa2p3r2 -sa2r5s2 -sa2r -3sas. -sa4s3s2 -sassem4 -s2a1t -sa2te -s5a2t1iv -s5a4to2r2y -sa2to -sat1or -s2a2t1u -1s2au2 -sau5c2i2 -s4a4uc -sau4r5 -savi2 -s2av -sa3vo4u2 -sa1vo -4s3b -s4bei -sb2e -sbe4s2 -s2by3 -sc2 -s1ca -sca5len -sc1al -sc2ale -s1c2a2p -scar4c2 -s1ca2r -sc2av3 -s1ce -s4c2ed -4scei -4s4ces -s2ch2 -sci2d5 -s1c2i2 -s2c2o2 -scof4 -s4c2oi -3s4cop4e -5scopic -scop1i -5scripti -scr2 -scr2ip -scri2pt2 -2s1cu -4sc4u4ra. -s1cu4r -scur1a -4scuras5 -2s1d2 -2se. -se2a2 -s4e2a2m -seas4 -sea3w -sec4a -sec5a2n -se2c2o2 -secon4 -2s2ed -se4da -sed4it -3se2ed -3sei -se2i3g2 -5sel2a -4s2ele -se3lec -selen5 -5self -2s4e1m2e4 -sem2i -semi5d -se1m4o -se4n5g -3se4n2s -sen5sati -sens2a1t -sen5sor1i -sent5ee -sen1te -5sen4t3m -seo5l2o1g -seo2l -seo1lo -se2p -se1p3a -sep4si -se2ps2 -3se2pt2 -sep3ti -ser4a2n -ser1 -se5r4en4e2 -s2er2e -ser3en -ser4t4o -sert2 -4ser1vo -ser1v2 -s2es -4s4es. -se5s2h -s5esta -sest2 -1set -5s4eum -3sev -sev3e4n -se1wo4 -3sex -sex1o2 -3sey -2s1f -sfac2t5o -s1f2a -sfa2ct2 -sfi4 -sf2or5e -s1fo -sfra2n5 -s1fr2 -2s1g4 -s2h -4sh1ab -sha2 -sh4abi -sh1er1 -she2 -sh5et1 -shil5li -shi4l -shill2 -sh5i1ne4ss -sh1in -sh2in2e -shine4s -sh3io2 -5sh2i2p -s3h2o4n -4shu4 -sh4y2s4 -sh2y -si4all2 -s2i1a2 -siast5 -4s1ib -s3ic1at -3sic1c4 -2s5icl2 -s2i4cu -si5cul -s4i2d -4sid. -si4de -side5l -sid3en -si1d5eri -sider1 -4si4d1s2 -5sid5u4a2 -si1du -si4e2r1s2 -sie2 -si1er1 -s2i1f4 -si2f5f -s2i4g -1sili -sim4p4ly -sim1p -simp2l2 -2sin -s2ine -sin5et -5sing5er1 -s4inge -si4ng -s2in3i -5s4i4n2k -si5nol -s2ino -si3nus -s2in2u -1s2io4 -4sio. -si5o5s -3s2ip -si4pr2 -s1is2 -4sis2h -4si2s1m4 -s4ist3a -sist2 -si4s1t3o -s1it -si4te -sit5om -4s1iv -5s2iva -s1j -s2k2 -4sk. -s5ka2r -ske2 -s3ket -s5key -s3ki1er1 -skie2 -s5ki1es2t2 -sk5i2ly -ski2l -sk5ine4s -sk1in -4s4k1s -s3ky3l -2sl4 -slan2g5i -sl2a -sla2n -sla4ng -s1lat -3sl4au2 -slav5eri -sl2av -slaver1 -s2le -s5lea2 -s3let -s5ley -s3l1it -s1l2o3c -slov5 -s5l4uc -2s1m4 -s3ma2n -smas4 -s3men -sm2e -sm2i3g -3sm2ith -smi2t -smo4d1 -s1mo -smu5ta1t2io2 -s1n2 -s2n1a -2so. 
-2s3od -so2d3o -so2d2y4 -3soe -4s3o2i4d -s2oi -s2ol -sol3a -so5l4a2n -so2l4er1 -so3lic -3solve -solv5er1 -1s2o2m -soma5to -so1ma -so3mat -3so2me. -som2e -so5met1e -so1me2t -so3mo -s2on1a -son5at -s4o2ne -son5or -s2o2p -4sor3ie2 -sor1i -5sor2i3o2 -sor4it -s5or1iz -sor3o -s3o2r2y -sos4 -4sose -s4o5th -s2ot -3so4u2 -sov5e -so3vi -spas1t4 -s1p4as -spens5a -spe4n2s -4speo -3sperm -sper1 -s5pero -spers5a -spe2r1s2 -sph2 -s3pha2 -3spher1 -sphe4 -spic5ul -sp2icu -s2pi2d -sp5id. -s5pi1er1 -sp2ie2 -spil4l2 -sp2il -s2pin -sp3i4ng -sp2i5n1i -spital5 -spit1a -s1p2l2 -sple2 -s4p4ly -s2po -5sp2om -spon5g2i -sp4o2n -spo4ng -3spo4n1s2 -3spoon -sp2oo -spr2u5d -s1pr2 -s4p4y -s1r -sr2e2 -sreg5 -sre1p5u -sre4s -4ss -s1sa -s5s2a4m2 -s1sel -s5se4n5g -s3sent -ssent5er1 -ssen1te -ss3er. -sser1 -s5seri -ss3e2r1s2 -s5seu -s3sev3 -s3s2i1a2 -s1sic -s1s2i1f4 -s2s1in -ss4in. -s4s2ine -ss4is. -ss1is2 -s3s2it -ss4ivi -s4s1iv -ss5li -s2sl4 -s2s3m4 -s4s1n2 -s1so -ssol3u -ss2ol -ss4ol1u4b -s4s2or2e -ssor5ial -ssor1i -ssor2i1a2 -ss5po -s1su -ss3w2 -st2 -4st. -s2t1ab2 -sta3bi -4s1t2ak -s4t2ale -stan2t5iv -sta2n -stant2i -s3tas. -5stat1i2c -s2t3c2 -ste2 -ste5a2r -stea2 -ste5at -s4te1b -s4tec -4s1t4ed -s4te2dl4 -s4te4d1n2 -4s2t2er2e4 -ster1 -ster4i1a2 -s1teri -s4tern. -ster2n2 -s3tero -st5es4t2 -s2t2es -s1th -s4tha4 -s4thu -s3t2i3a2 -3st2ick1 -st1ic -s3t2ic1u -stil5ler1 -st4ill2 -s4ti2ly -st3i4ng -st1in -5s4t2ir -s5t1iz -4s2tl -st3ler1 -st3li -s4toe -3ston -sto2n4e3 -ston4ie2 -ston1i -s5torat -st1or -stor1a -st4or5ia2n -stor1i -stor2i1a2 -s4tose -s2to4u2 -s4tr4ay -str2 -str2e4 -strep3 -3st4r4uc -str2u5d -2s4t3s -s1tu -s4tud -stu4m -stur4e -stu4r -4st1w -s4ty -1styl -4su. -su5a2n -s2ua2 -su4b1 -su2b1t2 -su2ct4 -s4uc -sud4a -su3e4t -su2f3f -suf2 -sug3 -3s2ui2 -su2i5c -su5i4ng -1s2ul -s4u2m -su1m3i -su4n4a4 -sun1 -su5pe -s2up -su3pin -supra3 -su2pr2 -sur4as5 -su4r -sur1a -sur3c2 -s4ur1g2 -sur3p2l2 -su5su -su5z -2s3v -svers5a -sver1 -sve2r1s2 -sves4 -sve2st5i -svest2 -sw2 -5swee -swel4l5i -swell2 -4sw4e2r4ed -swer1 -sw2er2e -2s1wo -s2y -4sy. -sy4b2i -s4y1b -sy1c -sy4ce -sy4c2hr -sy2ch -sy4d4 -1syl -3syn1 -syn5e -sy5pho -s2yp -sy2ph -syr5i -2ta. -2t1ab -ta5blem -ta2bl2 -3tabli4 -t2abo2 -ta3bol -ta4bo4u2 -t4a3ce -ta5ch2om -ta2ch -ta1cho -ta3ch2y -ta4ci2d -t2a1c2i2 -t5ade -tad4i -5t2ad1j -ta5d2or -ta2do -tad2r2 -tae5n -taf4 -tage5o -ta5g2o1g -3ta2gr2 -3t2ah -1t2ai2 -3tail -2ta2ir -t4ais -1t2ak -tal2c2 -tal5ent -t2ale -ta5lep -t4al2i1a2 -t4al1in -tal4l3a -tall2 -5tal1lu -t2alo4 -t2a3ly4 -tam5ar1i -t2a2m -ta1ma2r -5ta3me2t -tam2e -tamor2ph5 -ta1mo -ta3morp -tan5at -ta2n -tan2a -tand5er1 -ta4nd -tande2 -t4ane -5tanel -tan5ie2 -tan1i -t5an1iz -ta2nt5a2n -tan2t2a -t4a4pa -t2ap -1tard -ta2r -tar5ia. -tar1i -tar2i1a2 -tark5i -tar2k -tar3n2 -3t2a4r1r4 -tas3i -t3a2s1m4 -5ta4ss2 -tas4t2 -t2a3sta -tast5i4c -t4ateu -3ta2t1is -t4a2to. -ta2to -tat4o4u2 -t2a2t4r2 -ta1t3ut -t2a2tu -tau3t2o -t2au2 -t5awa -tawn4 -t4a4x -4t3b -2tc2 -t1ca -tcas4 -tch5e2t2t4 -t2ch -tche2 -tchet1 -tch5u -4t1d4 -4te. 
-te5cha2 -te2ch -5tec2h1n2 -te3cr2 -t4ed -te5d2a -4ted1d4 -4te1do -4tee1i -te2g -5tegic -te1g2i -t3ego -te2g1r2 -teg3u -tei4 -te2l -4t4e4l2ed -t2ele -tel5iz -1tell2 -4te3lo -3te4ls -tem3a -4te1m2e4 -t4e5m4on -te1mo -ten4ag -ten1a -4te2n3a2r -4t4ene -t5en3m -5tenn1a -ten1n4 -4ten1o -te5n2o1g -tent4a -te2o -teo5l -2tep -te3pe -tep5i -tera4c -ter1 -t4era4g -t4era2to -3ter3b -5t2erd -2t2er2e4 -ter3e1b -ter5ec -5terel -te3reo -3tere4s4 -1teri -ter3i1a2 -ter5i2d -ter5i1f -t4er3in -ter5iorit -ter2i3o2 -teri5or -terior1i -t4er3i2t -ter5k4 -5tern3it -ter2n2 -tern1i -ter5no -3te4r1r4 -2t2es -4t4es. -tesi4 -t3esq -t3ess. -te4ss -t5ess2es -tes4t2 -test3a -5teste2 -test5er1 -test5in -te2sti -test5or -tes5tu -teti4 -tet1r2 -tet1r5o -tew3a2r -te1wa -3tex -2t3f -t3g -2th. -tha4 -th5al. -thal3m2 -4t4he. -the2 -4th2ea2 -th5eas -4th2ed -1th4ei -3theo -theo3l -t4her1 -5ther2ap -th5er1c2 -t5h2erd -4th4e2r4ed -th2er2e -th3er2n2 -th3e2r2y -4t2hi. -t5hill2 -thi4l -3th4i4n2k -th1in -5th4io2 -th4is. -th5lo -t4hl -2t2h1m2 -th4mi -th3oli -4t5h2oo -4th1o2p1t2 -4thores -th2or2e -3th2ot -5thoug4 -th2o4u2 -1t2h2r -2t2h1s -5thu4r -5thy2m -th2y -3thyr4 -th4y2s4 -4ti. -1t2i2a2 -ti3ab -2t3ib -5t4i5bu -t1ic -t3ic. -tic5as -t2i1c2i2 -tici5a2r -tic2i1a2 -3t4i3cin -t4icity -ti3col -t2i2c2o2 -t2ic1u -4ticule -t3id. -ti2d -t4id1a -3tidi -ti3die2 -t5i4d1s2 -3t2i2en -tie2 -1t2i1f2 -ti3fe -4ti2ff -4ti4f3ic. -3t4i1g2i -t2ig -ti3g2i5o2 -4ti2g1m4 -5tigu -ti4ka -t2ik2 -ti4let -5til1in -t4ill2 -til4l5ag -til1l2a -t4ilt -1tim -tim1a -5ti1me2t4 -tim2e -t1in -5ti5nad -tin1a -4t4i2n2ed -tin3et -ti4ng5i4ng -t4in2g2i -ti4ng -3t2in1n4 -4ti4n1s2 -t4int -tin4te -tin5t4ed -tint5er1 -tin3ue -t2in2u -1t2io2 -ti3oc -tiol3a -t4i1ol -ti5o3mo -ti2om -4tionem -tio2ne -1t2ip -ti5plex -tip2l2 -ti3pli -t2i4q -ti5q2ua2 -tiqu4 -t3iris -t2ir -tir1i -2t1is -3tis1a2n -ti4sa -ti2s4c2 -tish5i -tis2h -3t2i4s1s -tis2t2 -5t4iste2 -t4is1tr2 -ti5t4a2n -tit1a -tith4e2 -t2ith -tit5il -t3i2t1is -3ti2tl -ti3tra -t2itr2 -3t2i1u2 -2t1iv -tiv5all2 -t2iva -t3ive -tiv3is -2tl -t1l2a -tlant4 -tla2n -5tle1b -5tle5dr2 -t2l2ed -3tle1f -3tlem -5tlen -5tletr2 -t1let -5tlew -t1li -tlin4 -4t3m -t1me2t2 -tm2e -tm2o4t5 -t1mo -2t3n2 -t4n2er2e -tn1er1 -2to. -toas4 -t2oa2 -to1b -4toc1c4 -t2oc -to3de5c -tod4i -to5do -3toe -1t2o1g -2t3o2i4d -t2oi -5tok -4to2l2ed -tol4l2 -tolu5 -to5ly -to2m3ac -t2om -to1ma -to1ma4n -tom2at5ol -to3mat -toma2to -to2m4b -to4m2o1g -to1mo -tom5os -to2n4e -t4on5ea2 -3ton1n4 -to4n3s2 -top4e -to5p2i1a2 -top1i -to4p4os -to1po -t1or -to5r2ad -tor1a -4t2or2e -tor5er1 -tori4as -tor1i -tor2i1a2 -to4r5oi -tor5p -tor4q -3tos. -t4o3s4p -tos4t2 -to5str2 -to5tal1is -t2ot -to5tal1iz -to3tem -tot5u -tou4f2 -to4u2 -5tou4r -t3ous -4tov -to3wa2r -t3p -tr2 -tra4c2o2 -4trad1d4 -tr2ad -4tra5ist2 -tr2ai2 -tra5q -trar2ch4 -t4rar1c2 -tra2r -tra5ven -tr2av -tra5ve2r1s2 -traver1 -trav5est2 -3tr4ay -4t4re. -tr2e -4t2r4ed -tre4mo -tren4 -tr4end5i -tr2e4nd -tre5pr2 -tre4s4s -4trew -t5r4icl2 -3t2r2icu -t2rie2 -tri5fli -tr2i1f -tr4i2fl2 -t5rifu5g4a -tri3fu -2tril -tri3li -tri3m2e -t2rit -4tr2ix -t4r2od -tro5f -5troo2p -tr2oo4 -tro4pha2 -tr2op -tr4o3sp -t2r2ot -t5ro1t2o -tro1v -3tr2oy -t4r4uc -tr2u3i2 -2t4r2y -tr4y2s4 -4t1s -t2sc2 -ts4h -ts2i -t4sil -tst4ay4 -tst2 -2t1t4 -tta4 -t3t1ab -t5ta2n -t5tas -t3t4ed -t4t2er2e4 -tter1 -t5ter3er1 -t5tes4t2 -t2t2es -t3ti -tti3tu -ttitud4 -ttitu5di -t3t2ler1 -t2tl -t3t1li -t5t2oi -t5t1or -t3tos -t4t5s -t4tu1pe -tt2up -t2ty -4tu. -t2u1a2 -tu4al5li -tu1al -tuall2 -tuar3i4 -tua2r -tu4bin -tu1b2i -tu5bu -tu5den -tud4e -tud5ie2 -tu5en -4tuf2 -t2u1i2 -tu4is -2tum. 
-3tu1mi -4tu4m1s -3tun1 -tu4n4a4 -tu4ne -tun5it -tun2i -tup5let -t2up -tup2l2 -tup5lic -tu5rac -tu4r -tur1a -t4ura2n -tur2b3a -tur1b -tur4d -turf5i -tur2f -5turit -tur1i -tur4n2 -5tur5o -1tut -4tu2t4iv -tut2i -t1w -t3wa4 -t2wi2 -twi5li -t3wit -t3wo -twon4 -4ty. -ty4a2 -5ty2ch -ty4let -tyle2 -tyl5i -ty5mi -1t2yp -3type -1tyr1 -2tz2 -t5z2i1a2 -tz1i -t5z2ie2 -2ua2 -u2a3c2i2 -u2ag -u2a5h -u1al -ua5lu -uan4o -ua2n -uan2t5is -uant2i -uant5it -uar3a -ua2r -uar2d -uar3i -uari4n -uar5te2r1s2 -uart2 -uarter1 -uar4t5i -ua5ter2n2 -u4ater1 -uba4 -ub5b2ly -u2b1b2 -ub2bl2 -u1b2i -u4b2icu -ub3lin -u2bl2 -ub5lo -ub3ra -ubr4 -4uc -u1c2a -uc1cen5 -uc1c4 -u4c2e4nd -u1cen2 -u4ch -u5c2hr -uc3l2 -u4c2om -u1c2o2 -uc2o5t -uc2tr2 -u2ct -uc3ub -uc5ul -u5cum -u5d2ac -ud1al -ud4e -ud5e1p -u4der1 -udev4 -ud4g -ud4i4cin -udi1c2i2 -ud3i2ed -udie2 -u5dinis -u2d1in -ud2in1i -u3d2i3o2 -u5di1t2io2 -u2do -u5d2oi -ud5on -u5d2or -ue1b4 -u4ed -uen4o -uen4ter1 -uen1te -uer3a -uer1 -ue4s4s -uest5rat -uest4r2 -uest2 -ues5tri -ue4t -uf2 -3u1f2a -u3fl2 -u4fo -u2ft4 -uga4c -ug5l2i1f -u2gl2 -ug2n1i -u2gn2 -u4go -ug3ul -ug3ur1a -ugu4r -uhem3 -uhe2 -2ui2 -ui3al -u2i1a2 -u2ic -ui3cent5 -u4ice -ui1cen2 -ui1d5o -ui2d -ui2l4a -uild5er1 -ui3lib -uil4t -uin1c5u -u2inc -ui4n4s2 -uint4 -uin4ta -ui5pr2 -u2ip -uis3er1 -uis4t2 -uisti4 -uit5er1 -ui5val5 -u2iva -ui3vo -u2iz -4ul. -u1l2a -u4l1ab -4ul4ac -ul5ard -ula2r -u5lat -ul4bo2 -ul3b -ul3ca -ul1c2 -ul4ch -5ulch4e2 -5ulchr2e4 -ul3c2hr -4ulea2 -u5lee -u1len4 -4ulen1c2i2 -u5lent -u1let4 -u2l4ev -ul2f2a -ul2i -ul4i1a2 -u3l4ine -ul3i4ng -ul5is2h -ul1is -u5liti -ul1it -u5lity -4ull2 -ul4lat -ul1l2a -ul4l5ib -ul1li -ul4l1is -ul4l1it -ul3m2 -u1lo -u5l2om -ulph3i -ulph2 -ul2ph3o -ulp5i4ng -ul4po -2u4ls -ul3s2i1f4 -u1lu -ul1v4 -u1ma -u2m3a2m -u1ma2r4 -u5mas -um4bar. -u2m1b -umba2r -um2b2i -umen4t -um2e -u1men -u1mi -u4m1ic -u2m5i1f -umi4fy -umi5l2i1a2 -um2il -umin4a2r -umin1a -u4m4i2n2ed -u4m3i4ng -u4mor1a -u1mo -u4mos -um2p -um4pa -ump3er1 -ump5li -ump2l2 -um2pt4 -ump5te -u1mu -umu4lo -un1 -u4n3a4 -un5ab -unabu4 -un4ae -un4as. -un2as -un2ce -un4dal -u4nd -un2da -un3d2ed -unde2 -un1de4t -undeter5m -undeter2 -un1di4c -un4die2 -un3do -un4dus -un1du -u3n2er1 -unho5li -un1h2 -unho4 -un2i -u1nic -un4ie2 -un3in -un4ine -un2i5p -uni3s4o -un3ist2 -un2i1v -un3iz -unk5eri -u4n2k -unker1 -un5ket -un3kn2 -2un1n4 -un4nag -unn1a -un5o -un5r -u4n3s4 -un5s2h2 -un2t2i -until4 -un2u4 -un3us -uo3de -uo3dent4 -u5oros -u3os -uo5t4a1t2io2 -u2ot -u1o4u2 -2up -u1pat -u1pe -u5p4ee -uper3 -u1ph -u5pi2d -up3i4ng -u4po -u5pol -u2pr2 -upr2e4 -u5que4t -uqu4 -u4r -ur1a -4u4ra. -ur2a4c2i2 -4urae -ura2g -4urant2i -uran2t -ura2n -uras5 -urb5i4ng -ur1b -urb2i -ur2bin -ur2c2 -urc3a -ur5den. -ur5den1i -ur5die2 -ur4du -ur3ea4 -ur2e -ur5ee -ur1er1 -ur3e2r1s2 -ur1e2t -ur3e2t2t4 -ur2f -ur3f2a -ur1i -u5r2i5cu -ur4ie. -urie2 -ur5ifie2 -ur2i1f -uril4 -ur4ili -ur5ion -ur2i3o2 -uri4os. 
-ur2l5er1 -ur1le -ur5lie2 -url5i4ng -ur1m4 -urn3al -ur2n2 -urn1a -urn3er1 -ur4n5s -ur1o -ur2o4d -ur5o4m -ur5ot -uroti4 -ur3pen5t -urpe2 -ur2ph4 -u2r2s2 -ur2s5a2l -urs5er1 -ur2se -ur3s2h -urs3or -ur5ta -urt2 -ur1te -ur5t2es -urth2 -ur3the2 -urti4 -ur1u -ur4va -ur1v2 -u3sad -us3a4g -us3a2l -us4ap -us3a1t -2usc2 -us4ca2n -us1ca -ush5a2 -us2h -us5ia2n -us2i1a2 -usil5 -u4s1in -usk5er1 -us2k2 -uske2 -us1p -us4pa -uss4e -u4ss -4ust2 -us3tac -us5ta2n -ust4ic -us5t2i1c2i2 -ust5ig -ust3il -us1to4 -us1tr2 -us4tr2e4 -usur4e -usu4r -us5ur1i -u3t4ane -uta2n -utch4e2 -u2tc2 -ut2ch -ut5en1i -u5te2o -u4t2er2e4 -uter1 -ut2i -u3tie2 -ut3i4ng -ut1in -u5t2in1i -u3t2io2 -ut5i2s1m4 -u2t1is -ut3is2t2 -5u5t1iz -ut3le -u2tl -ut1li4 -ut2o -u4to5s -u4t1ra -utr2 -u4t1s2 -ut5s1m4 -ut4to2n4e -u2t1t4 -u3tu -u4tul -uu4 -uv2 -u4va -uve2 -uven3 -uv5eri -uver1 -u5v1in -ux2o -uy4a2 -uy5er1 -4va. -2v3ab -5vac -va1c2a -va5ceo -vacu1 -v4ad -3vag3a -va4ge -4va2g2e4d -vager4 -va2g5r2 -v1al. -1v2ale -vali2 -va5lie2 -val4ise -val1is -5valu -5val4v -vam4i -v2a2m -va5mo -5van1n4 -va2n -van2t2a4 -4van2tl -var4is -va2r -var1i -4vas2e -vas5el5 -v5a4so -v2ast3a -vast2 -v4at. -5vatee -v4at4in1a -va2t3in -4v2a2tu -2ve. -ve2ct4 -ve3g -3vei -2vel -vel3at -vel2a -4v2ele -v3e2l1er1 -ve5l4ine -v1ell2 -v4el1l2a -vel5ler1 -vel3li -vel5op1i -ve4n4al -ven1a -ven4do -v2e4nd -v4e1ne -ve5n2i1a2 -ven1i -ven2t5o -ven4tr2 -4v4en2u -v5en5ue -5ve3o -5ver1b -ver1 -verde5v -v2erd -4v4er2e4 -ver5ea4 -ver3ei4 -v5er3ie2 -ver3m4 -ver4ne -ver2n2 -5ver2se -ve2r1s2 -4v4es. -4vi. -5vi3al1it -v2i1a2 -vi4a2tr2 -vi3at -vi1b4 -vic2 -vi4ca -vi5car1i -v2i1ca2r -vice3r1 -v4ice -5vi2ct2 -5v2icu -5vider1 -vi2d -vi2gn3 -v2ig -vi4l -vil3i -3vil2i4a2 -v5ilise -vil1is -v5ilize -vil1iz -vil5lin -vill2 -vil1li -vim4 -5vim2e -2v1in -vin4a2c -vin1a -3vin1c2i2 -v2inc -vin2e -5vinit -v2in1i -v5in1iz -vint4 -vin5ta -3v2i1o2 -v4i1ol3 -vi5om -5v2i3p -vir2e4 -v2ir -vi5r2i4d -vir1i -vir3u -5visecti -v4i1sec -vise2ct -5vi1s2io4 -v3i2s1m4 -2v5ist2 -vi2t -vit2a -vi3tal -vi5te2l -v5it1ie2 -v2it1r2 -vi3tu -v3ity -viv5al -v2iva -viv5or -vi2vo -v2i5zo -v1iz -1vo -2vo. -vo2l -vo5li1t2io2 -vol2it -vol4u1b2i -v4ol1ub -volv4 -4von -vo5rac -vor1a -3vor1c2 -4v2or2e -3voro -vo3ta2r -v2ot -2vow -vr4 -v5ra4 -v5ri -v5ro -vr2ot4 -4vs -v3ur2e -vu4r -2vv2 -v5ver1 -v5vi -4vy -4w1ab -wag3o -wais4 -w2ai2 -w3al. -wal2l5er1 -wall2 -w3a4ls -wan5gli -wa2n -wa4ng -wan2gl2 -wank5er1 -wa4n2k -war5d2ed -wa2r -ward5er1 -ward5r2 -war4f -war4te -wart2 -war5th2i -war2th -wa4ss4 -was4t2 -wa1te -wav4in2e -w2av -wa2v1in -w1b4 -w4bon -wbo2 -w5c -w5die2 -w3dr2 -we4b -w4ed -3we2ed -5wei -weight5i -we2ig2 -wei2gh -weigh2t -we2ir4 -wel3i -wel1iz4 -wel4iz3i2 -wel4li -well2 -went4 -wes4 -west3 -w5e4st. -w5f -wh2 -w5hi2d -wi2 -wid4e -wi2d -wi5er1 -wie2 -wil2l5in -will2 -wil1li -wim2p -win2e -wing5er1 -w4inge -wi4ng -win4tr2 -3w4ise -with5eri -w2ith -wit4her1 -withe2 -w3l2a -w2l1er1 -wl1i -wl4ie2 -w1m -1wo -wol4 -wol5ver1 -3w2om -w2on2t -word5i -wot2ch4 -w2ot -wo2tc2 -w2oun4 -wo4u2 -wp5in -wra4 -ws5i4ng -w2sin -w5ster1 -wst2 -wste2 -wt4 -w5te -w3to -wy2 -wz4 -x1a -x4a2ch -x4ade -x2ag -x3a2g1g -xa5me2t -x2a2m -xam2e -x3am1i -xa4n5d -xa2n -xan1o4 -x2as -xas5p -x3c4 -xc2av3 -xcor5 -x1c2o2 -xe4 -x1ec -xec3r2 -xe5cu3t2io2 -xecut2i -xecut5o -x2e2d -x5e2dl4 -x5e4d1n2 -x5eg -x1em -x3en -xen4op -xen1o -x3er1 -xer4g2 -xer3o -x1h -xhort4a -xh2ort2 -x1i -x3ia. -x2i1a2 -x4ias -xi4c -x5i1ge -x2ig -xim3a -x4im2e -xi1me2t4 -x3io2 -x2i4p -x4it. 
-x4i4t1s -x1o -x4ode -x5om -xo4mat -xo1ma -xo4n -x4os -xotr2op4 -x2ot -x4o1tr2 -x3p -xpel4 -xp4o5n2 -x1po -xp2oun4 -xpo4u2 -x1s2 -x1t2 -x4t4ed -xtens5o -xte4n2s -x1ter3i -xter1 -xter4m3 -xter2n3 -x4th -xti4 -xtr2a5d -xtr2 -xtr2a3v -xtr2e4 -xu4o -x1u4r -xur4b -x5us -x5w -xx4 -x4y2s4 -xy3t -y1a2 -y5ac -1y2a2r -3yard -yas4i -4y1b -yb2i -yc2a5m -y5chede2 -y2ch -y4ch2ed -yche2 -ych5is -y3cho -y4chose -yc1l2 -ycl2a2m4 -ycl2a -y4coli -y1c2o2 -y4coll2 -yc2om4 -y2cos -y1d4 -yda4 -yder4 -ydro5s -ydr2 -y4dro4u2 -y3ee -yel5o -y3en -y1er1 -y3e4st. -yest2 -yes5te2 -y5e2t2t4 -y5f -y1g -y1g2i2 -yg2i5a2 -y3gl2 -yg2o4i -y1h -y1i -y3in -yle2 -ylin5de2 -yl2i4n4d -yllab5i -yll2 -yl1l2a -yll1ab -yl3os -yl5o4u2 -y1m2e4 -y3men -y5me2t -y5m2i1a2 -ym5in -ym2ot4 -y1mo -ym4pha2 -ym1p -yn1 -yn5ago4 -yn1a -yna4nd5 -yna2n -yn5ap4 -yn5ast2 -yn2as -yn4c2i2 -y4nd4 -yn2e -yn3er1 -y4ng4 -yn4gol -yn1go -yni4c -yn1i -y2n4y2 -y1o2 -yo3d -yo4g4i4s -y2o1g -yo3g2i -y2oun4 -yo4u2 -you4ng5 -2yp -yp5al -yper3 -y5p2er2e -y4p4eri -y4pero -y4pet -y2ph -yph4e4 -yph3i -y4p1i -y2p1n -y1po1 -y4po4x -y2pr2 -yp5ri -yp4si -y2ps2 -yp5sy5f -yps2y -ypt3a -y2pt2 -y5pu -y3rag -yr3at -yr3ic -y5r2ig -yr3is -yr3i4t -yr5o1lo -y4r4r4 -y2r4s2 -yr5u -4y2s -ys5a4g -ys5a1t -y3s2c2 -y3s2h -ys1ic -y2s3in -y1s2i4o4 -yso5 -ys4so -y4ss -ys1t2 -ys4to -y3u -yv4 -y3w -yz5er1 -yzy4 -z1a1 -2za. -za4bi -z1ab -z2a2i2 -z4as -za4te -zd4 -ze1b4 -z2e4d -zen4a -z5e4ng -zer5a -zer1 -z3et4 -z1i -zib5 -5zic4 -z2ie2 -zi5m -zin4c3i2 -z2inc -z3i4ng -z4in2g5i -z4is -3zlem -z3ler1 -z3li -4zo. -5z2oa2 -zo3a2n -3z2oo2 -zo3ol -zo3on -zo5o2p -zo5oti -zo2o2t -zo5p -z2ot2 -z5s -5zum -4zy. -zz2 -z3za2r -zz1a1 -z5z4as -z3z2ie2 -zz1i -zzo3 -z5z2ot2 diff --git a/dist-packages/wordaxe/wordaxe/dict/hyph_en_US.dic b/dist-packages/wordaxe/wordaxe/dict/hyph_en_US.dic deleted file mode 100755 index c2c5f8e2f..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/hyph_en_US.dic +++ /dev/null @@ -1,11388 +0,0 @@ -ISO8859-1 -.ab4i -.1ab -.ab3ol -.abo2 -.ace4 -.ace2t3 -.a2ch4 -.ac5t2iva -.a2ct -.ac2t1iv -.ad4d1in -.ad1d4 -.ad3di -.ad3e -.a2d3o -.4a2e5d -.aer3i -.aer1 -.a2f3f -.a2f3t -.ag4a -.4a2g5n2 -.a2ir3 -.2ai2 -.al5im -.4al1k -.al3le -.all2 -.a1m5a2r -.2a2m -.ama5te -.am1at -.am2i -.am3pe -.am2p -.am3ph -.a2n1 -.an1a3b -.an2a -.an2a3s -.a4nd2 -.an5da -.an4el -.a4n4en -.an4gl2 -.a4ng -.an4on. 
-.an1o -.a4n3s -.an2t3a -.an3t2i3 -.4ant4ic -.an4t5o -.a2n2y5 -.a3ph5or -.2ap -.a1pho -.ap4i -.ar5ab -.a2r -.ar5ap -.ar4c2i2 -.ar1c2 -.ar5d -.ar4e -.ar1i4 -.ar4ise -.ar4isi -.ar5sen -.a2r2s2 -.ar2se -.ar4t5icl2 -.art2 -.ar1t4ic -.as1 -.as4q -.as5s1ib -.a4ss2 -.at5a2r -.2a2ta -.ateli4 -.ate2l -.at5omi4se -.a2to -.at2om -.atom1i -.at5om1iz -.2a2t3r2 -.a2t3t4 -.au3b -.2au2 -.au3g4u -.aur4e5 -.au4r -.aus5 -.authen5 -.au3th -.authe2 -.2av4 -.av5era -.aver1 -.bap5ti2s1m4 -.b2a4p1 -.ba2pt2 -.bap2t1is -.barri5c4 -.ba2r -.b2a4r1r4 -.bar3ri -.bas4i -.1bas -.ba5sic -.be3d2i -.b2e -.2b2ed -.be3lo -.1bel -.be5r4a -.ber1 -.be5s1m4 -.be1s2 -.bi4er1 -.b4ie2 -.blaz5o -.2bl2 -.bl2a -.b4l2az2 -.bo3lo -.bo2 -.bo1s5o2m -.bou4n4d -.bo4u2 -.b2oun1 -.bov4 -.3bra5ch -.br4 -.br2e2 -.burn5i -.bu4r -.bur2n2 -.ca3de -.ca4gin -.ca1g2i -.cam5i -.c2a2m -.ca1m3o -.ca2n1 -.can5t2a -.ca5p2itu -.1c2ap -.cap1i -.car4i -.1ca2r -.cas5u1al -.3cas1u3 -.cas2ua2 -.ca4ti -.c1at -.cen5so -.1cen2 -.ce4n2s -.cen5ten1a -.3cent -.cen1te -.cen4t5ri -.cen1tr2 -.cer4i -.cer1 -.2ch4 -.cit4a -.1c2i2 -.cle1m5e4 -.cl2 -.c2le2 -.clima5to -.cli1m -.co5i4t -.1c2o2 -.c2oi -.co3pa -.cop5ro -.co1pr2 -.c4o3r1u -.co3si -.co5ter1 -.c2ot -.coty3le5 -.5coty -.cri5t2i1c2i2 -.cr2 -.crit1ic -.cust2om5 -.1cus -.c4ust2 -.cus1to4 -.3d2av5 -.dea5c2o2 -.dea2 -.de5lec -.d2ele -.del5eg -.de3li -.de3l2i5r -.1d4e1m -.de5nit -.den1i -.de3n1o -.der2 -.de3ra -.de5re4s -.d4er2e -.1de3ri -.de5sc2r2ib -.5de2sc2 -.descr2 -.de5ser1v2 -.deser1 -.de5signe -.des4i -.des2i4g -.desi2gn2 -.de5s2ir -.de5s1is2 -.de5sp2oi -.des1p -.des2po -.determ5i -.1de1t -.deter2 -.de3ve -.de4w -.di4al. -.1d2i1a2 -.dia3s -.di4at -.din4a -.2d1in -.di2o5c -.3d2i1o2 -.1do2 -.do4e -.domest5 -.d2om -.dom2e -.do2me2s -.du4al. -.1du -.d2ua2 -.du1al -.3d4u4c -.d4y2s3 -.2d2y -.eas4t5 -.ea2 -.ech1in5 -.ech3i -.e2ch -.e1c2o3 -.e2c3t -.e1d5em -.2ed -.ede2 -.ed4it. 
-.ed1it -.ed4iti -.eg4 -.ei3d4 -.e2i5r -.e2l3ev3 -.2ele -.el2i -.elu5s4 -.e1lu -.e2m3b -.em5in -.em1p4 -.em5p4y -.en1 -.en5c -.en4d2ed -.2e4nd -.ende2 -.e4n3s -.ent2 -.en5ta -.eo1s5 -.epi1 -.epi3d -.er2a -.er1 -.er5em5 -.2er2e -.er4i4 -.er4o2 -.eros4 -.er2ot3 -.er4ri -.e4r1r4 -.es1 -.esc1al5 -.e2sc2 -.es1ca -.es3p -.es3t2 -.eter2n5 -.eter2 -.eth3e2 -.e2th1 -.eu1 -.eu4r4 -.eval3 -.evol5ut -.e1vo -.evo2l -.ew4 -.ex1 -.ex3a -.eye3 -.fal4le -.1f2a -.fall2 -.far4i -.5fa2r -.fec5un2da -.3fec -.fecun1 -.fecu4nd -.f2e4n4d -.feo2ff5 -.feof2 -.fi2 -.fi5l2i1a2 -.1fi2l -.fil5tr2 -.fi1n5e4ss -.2fin -.fin2e -.fine4s -.f1i4n3g -.fi5n4it -.f2in1i -.fi2s4c5 -.3f2o3c -.1fo -.fran5ch -.1fr2 -.fra2n -.fu5g4a -.3fu -.g2a4m -.ga1m5e2t -.gam2e -.gen4et -.3gen -.g4ene -.ge5neti -.gen5i1a2 -.gen1i -.ge3ro -.ger1 -.glor5i3o2 -.2gl2 -.3glo -.glor1i -.gnost4 -.2gn2 -.g2no -.gno4s -.go3no -.3gos3 -.h1ab2 -.ha2 -.ha5bili -.hab1il -.hama5 -.h2a4m -.han4de2 -.ha2n -.ha4nd -.hast5i -.hast2 -.h4e4i -.he2 -.hem5a -.hi2 -.hi3b -.ho2l -.ho5rol -.hov3 -.hy3lo -.h2y -.hy2l -.ico3s -.2i2c2o2 -.idi2 -.i2d -.2ig3 -.i2g1n2 -.il4i -.i2m5b -.in1 -.2i4n3d -.in3e2 -.2in2i -.2in3o -.in3t -.inve2st5i -.in3v2 -.inve2 -.invest2 -.i4r3r4 -.2ir -.i2s4c2 -.is4li -.i2s1l4 -.is4o -.i1s2o5m -.ka5ro -.ka2r -.ki4e2 -.kin3e -.k1in -.lab4o2 -.l2a -.l1ab -.la4m2e -.l2a2m -.lam5enta -.la3men -.lan5i -.la2n -.lash4e2 -.l4as -.las2h -.le4m -.len5t2i -.le2p -.le1p5r2 -.les5son -.3le4s4s -.les2so -.le5va2n -.2lev -.l3eva -.libra2r5 -.lib1r4 -.lig3a -.1l2ig -.l2i3o2 -.li4o4n1s2 -.l2i4p -.loc3a -.1l2oc -.lo4g2i1a2 -.1l2o1g -.lo3g2i -.lo2p -.loph3 -.lous5i -.lo4u2 -.lov5er1 -.lub3 -.ly1o3 -.2ly -.mac5u -.mal5ad5 -.mal2a -.ma5l1in -.mar5ti -.1ma2r -.mart2 -.m4a2th5 -.me5lo3d2i1o2 -.m2e -.melo4di -.ment4 -.1men -.men5ta -.me5r2i2d -.mer1 -.me5r3in -.met4er2 -.1me2t -.met1e -.mi4e2 -.mi3gr2 -.m2ig -.min5ue -.m2in2u -.mir2k4 -.m2ir -.mis1 -.mi5to -.mi2t -.mo3b2i -.1mo -.mo5le2c4 -.mon3a4 -.m4on -.mor5ti -.m2ort2 -.mu3n2i -.mun1 -.mu3si -.mus2i5c2o2 -.myth3 -.2my -.3myt -.n2a5k2 -.n1a -.nar1i4 -.na2r -.nast4 -.n2as -.nas5ti -.ne2c3t -.ni4c -.n1i -.ni5tro -.n2itr2 -.n2o4c -.no2m3o -.n2om -.nos3t2 -.no5t1ic -.n2ot -.nuc2le5 -.n2u -.5n4uc -.nuc3l2 -.o2b2ed5 -.ob2e -.o1b3e4l -.o2b3l2 -.od4 -.o2e4d5 -.oe5so -.o2f5t -.2oi4 -.ol4d -.om2e2 -.2om -.om5el -.on4ce -.on1c -.o2n4e -.op2i -.op2t5a -.1o2p1t2 -.or1 -.or4at4 -.or1a -.ora5tor1i -.orat1or5 -.ora2to -.or5che2 -.or4ch -.or1c2 -.or3d -.2or2e4 -.or3eo -.or4i -.orn1er4 -.or2n2 -.or2o -.os1 -.osi4 -.4oth5 -.2ot -.out1 -.o4u2 -.ov4 -.pal5i -.para5di4s1 -.1pa2r -.par2ad -.par5af -.par1a5t -.p2a5ta -.pa4t2io2 -.pe2c3t4 -.pecu3 -.3ped3e2 -.2p2ed -.p2e4nd4 -.pen5de2 -.pe2p3t2 -.per3i5n -.p4eri -.per1 -.per3se5c -.pe2r1s2 -.per2se -.pe5titi -.3pet -.pe2ti -.ph2 -.phe5n2o2m -.phe4 -.phen1o -.phon4i -.1pho -.ph2o4n -.p2i2e2 -.pi3l3a -.p2il -.plast4 -.p2l2 -.1pl2a -.pl4as -.plic4 -.plica4 -.plos4 -.po3l2a -.1po -.po5lite -.pol2it -.po2p -.p4op5l2 -.po5si1t2io2 -.3p4os -.pos1it -.pos5si -.po4s1s2 -.pro5bat -.1pr2 -.pr4oba -.pu4r4r4 -.pu4r -.put4te -.pu2t1t4 -.ra5cem -.ran5g2i -.ra2n -.ra4ng -.re3c2a -.r2e -.r2ef5er2e -.re2fe -.re1f -.refer1 -.re5ga2r -.re1i4 -.re5lin -.re1m -.re5o -.res5c2i2 -.re2sc2 -.re5sen -.re5s2po -.re5stat -.rest2 -.r2e5s4t2or2e -.rest1or -.re5st4r2 -.re3t2a -.re5u -.re3w -.rib5a -.2r2ib -.rin4 -.rit2 -.rol4l2a -.roll2 -.r4os3a -.sa2 -.sac5r2 -.sal4i -.sa2l -.sa5l1in -.salt5er1 -.s4al4t -.sal1te -.sanc5 -.sa2n -.s4ap5a -.s3ap -.sa3vo -.s2av -.sci3e2 -.sc2 -.s1c2i2 -.sea3s4 -.se2a2 -.se2ct4 -.sec5to -.se3gr2 
-.sen3t -.se1q -.ser4ie2 -.ser1 -.s2es1 -.sev5era -.3sev -.sever1 -.s2h2 -.si5g2no -.s2i4g -.si2gn2 -.s1is3 -.st4 -.sta2t4o -.stra5to -.str2 -.str4in2g5i -.stri4ng -.su5d4a -.sulph5a2 -.1s2ul -.sulph2 -.sul3t -.tact4i -.ta2ct -.tac5t2ic -.t2a4m -.ta1ma2r5 -.tar5o -.ta2r -.te2ct4 -.tel5a -.te2l -.tell5e -.1tell2 -.te4m -.te5ra5t -.ter1 -.ter4p -.th4 -.tho4 -.thol4 -.ti2 -.til4 -.t2i5n1i -.t1in -.t3i2t4is -.t1or1 -.tran4c -.tr2 -.tra2n -.tri5bal -.t2r2ib -.tri3d -.trin4a -.t4ri5sti -.trist2 -.tro4ph -.tr2op -.tro1ph5o -.tro4v -.tula2r5 -.tu1l2a -.tur1b4 -.tu4r -.tur1i4 -.tu5te -.1tut -.tu3t2o -.4ul4l2 -.ulti5mat -.ultim4a -.ul1tim -.un5ce -.un1 -.un5ch -.u4n3d2 -.under5 -.unde2 -.un3e -.u4n3g -.u1ni3c -.un2i -.un2i3o2 -.u4n3k4 -.u4n5s4 -.un3t4 -.un5u4 -.2up1 -.up3l2 -.ur1a4 -.u4r -.ur5e2th1 -.ur1e2t -.ur2e -.ur4o -.va5l2ed -.1v2ale -.ve2 -.vec5 -.ve5lo -.2vel -.vent5il -.vent2i -.v5er4ie2 -.ver1 -.ver3n2 -.vic5to -.vic2 -.5vi2ct2 -.vi2s -.vis3i -.vi5so -.v2o1c -.1vo -.vo5lut -.vo2l -.wine5s -.wi2 -.win2e -.xy3l -.za5r -.z1a1 -a4a -1ab -2ab. -2aba -ab5ar2e -aba2r -ab4ay4 -2a2b1b2 -ab5ber1 -abb2e -2ab2e4 -ab3erd -aber1 -a5b3e4r1r4 -a3bet -ab1ic -a3b4ie2 -2abin -4a1b2i2o2 -abi5on -ab3it1a -ab4itu -ab3l2a -a2bl2 -abli4 -4abolic -abo2 -abol3i -ab3om -ab3ota -ab2ot -3about -abo4u2 -ab1r4 -2abs. -a4b1s2 -ab1ul -abu4lo -ab3use -ab3usi -2a2by -ac2a -ac5a2bl2 -ac1ab -ac3al -5ac1anth2 -aca2n -ac5ard -a1ca2r -a5c1at -ach5al -a2ch -acha2 -a5ch2in1i -ach1in -ach5i2s1m4 -achro4 -a3c2hr -ach5u4r4 -2a1c2i2 -a4cic -aci4e2r1s2 -aci3er1 -acie2 -ac2i3f4 -4acit -ack5a -ack1 -ac3li -acl2 -4a4co. -a1c2o2 -aco3d -ac5on1r -acon1 -4acos -4aco4u2 -ac1r2 -ac3r2y2 -act5ate -a2ct -ac2ta -act5ile -ac2to -act5o2r2y -act1or -ac2t5r2 -ac5uat -ac2ua2 -a5d2ai2 -a3d2a3v -4adee -ad5en1i -ad4ha2 -a4d1h -ad3ica -a5d2i1f -4adil4 -adi4op -a3d2i1o2 -ad2i4p -adis4i -adi4s1 -a3diti -3adju -ad1j -5admi2t -a2d1m -a2do -4adoe -4ad2oi -ad3ol -a3d4os -ad1ow -ad1r2 -a3dr2a2m4 -4a2du -ad3u1l2a -ad3um -4a2d2y -ae5a2 -ae4cit -ae1c2i2 -ae1c2o3 -4a2ed -aed5i4s1 -ae5g -ae3on -ae5p -aero2d2y5 -aer1 -aer2od -ae4s -ae5si -aes3t2 -aet4a -ae2th4 -aet4or. -aet1or -aev3a -4af. -4afe -af5ta -a2ft -a4fu -ag4ar1i -aga2r -4ageri -ager1 -a5ghe2 -a2gh -a5g2i1a2 -a1g2i -agi4as -4ag2ino -4a2gl2 -agli4 -4a2g1n2 -ag3on1i -agor4a -ag5ot -a2gr2 -ag3ri -a3gru5 -2ah -a1h2a2 -aha2r2 -aha5r1a -a1he2 -a2h4n2 -a5h2oo -2ai2 -4ai. -a2i3a2 -a1ic -aid4a -ai2d -aid5er1 -a2ig2 -ai5gu -ai2l3er1 -ail3o -aim5er1 -aim2e -ain5de2r3s2 -a2i4nd -ainde2 -ainder1 -a4i5nea2 -a3ing. -ai4ng -a2in3i -a2in5o -aint5er1 -ain1te -air5a -a2ir -air5p -ai2r3s2 -ais1i -a5i2s1m4 -2a1j -a4ju -2ak -akel4 -ak5u -al5a2bl2 -al2a -al1ab -ala2ct4 -al4ac -a1l4ae -al5ais -al2ai2 -ala3ma -al2a2m -al5ance -ala2n -al3at -a5l2av -al2c3at -al1c2 -al3ch -ald5ri -aldr2 -2ale -a3lec -aleg4 -ale5ma -al5end4e2 -al2e4nd -a1leo -a2let -al3ib1r4 -ali4c2i2 -al5i4c5s4 -al1i2d -al3i1f -5a1l2ig -al1in -a5l2in1i -al2in5o -al5ipe -al2ip -al5ip2ot -ali3po -4alis. -al1is -4al2i1u2 -4alk -alk5ie2 -al4l1ab -all2 -al1l2a -al4lag -alli5a2n -al1li -all2i1a2 -al1l2ig4 -al4lis2h -all1is -a5loe -al3o1gr2 -a1l2o1g -a3l2om -a3l2oo -al1or -al4orim -alor1i -alos4 -a4lo4u2 -al3ous -a5low -al5pen -al3ph2 -al5tati -al3tie2 -alu3b -al5u4ed -a4lue -al3ues -a5lumn2i1a2 -alu4m1n2 -alumn1i -al1va -al5ver1 -alv5u -2a2ly4 -a5lyn1 -2a2m -a5mad -ama4g -ama4n5d -a1ma2n -a5marin4e -a1ma2r -amar1i -a3mas. 
-am1at -a5m4at1i2c -am5a2tu -am4bin -a2m1b -amb2i -3ambu -am5elo -am2e -a3men -am2e4n4d -am3era -amer1 -am5erl -am1i -am1i2c -am5ica -am2i1c5r2 -3ami2d -a3mili -am2il -am5i2ly -amin2i4f -am2in1i -am5in1iz -am4inos4 -am2ino -a5m2is. -a4mium. -a3m2i1u2 -ami3um -a3m4on -a1mo -amor5a -am2ort3 -am5ose -am2p -am5p4er3i -amper1 -amph2i5g -amp3li -amp2l2 -ampo5l -am3po -am3ul -amyl5 -a2my -a2n -an2a -a5nadi4 -an3ae -an3age -an2a5k2 -an3ali -an3ar1c2 -ana2r -a5nast2 -an2as -an4con1 -an1c2o2 -an3d4at -a4nd -an2da -and5au2 -and5eer1 -ande2 -an5de4l -an5d2i1f -and5ist2 -andi4s1 -an5dit -an4don1i -an1do -an4ea2 -an5eer1 -an3ell2 -anel5li -an3eu -a2n3ga2n -a4ng -ang2a -angov4 -an1go -an4gu4r -an1gu -4an1h2 -an3ic -an1i -an2i3f -an5i3fo -4an2i2g -an5ion -an2io2 -anis5te2 -anist2 -4anity -4an2i1u2 -an5no -an1n4 -4an2ny2 -an1o -an2oe -an3o1ma -an2om -anor3 -an2os -an5ot -a4n2s -an3s2c2 -an4s2c2o2 -ans3il -an2si -an4su4r -an3su -an2t2a -ant5a2bl2 -an2t1ab -an3t1al -an5t2a2m -an2te -1anth2 -an4thi -3an1t2h2r -4ant1ic -ant2i -an4tie2 -an4t3i4ng -ant1in -an2t4iv -an4to2n4e -an1t2o -an1t4r2 -an4tus -an1tu -an5tym -an2ty -an3ul -an2u -an3um. -an5u4m1s -a3nu4r -a5nut -a2n2y2 -an5y1a2 -a5ny1i -2ao -aol3i -5aow -2ap -4ap. -4apa -a1pac -ap3al -ap5aro -a1pa2r -ape5li -a5peu -aph5em -aphe4 -aph3i -aph5ol -a1pho -a3phy2l3 -aph2y -ap1i -ap5icu -ap3in -ap4in4e -a5p2ir -a3pla2n -ap2l2 -a1pl2a -ap5li -apo5str2 -a1po -a3p4os -apost2 -ap4o3th -ap2ot -a2pr2 -ap5ron -4a2ps2 -apt5at -a2pt2 -apu5la2r -apu1l2a -a5p4un1 -a4q -a5q2ui2 -aqu4 -a2r -4arabi -ar1ab -ar2a5bo2 -aract4i -ara2ct -ara2g -ar3age -ar4a2g2e4d -ar5a1g2i -ar3ago -a3r2a1j -ar3all2 -ar2a3m -ara4n4g -ara2n -aran5te -aran2t -ar5apa -ar2ap -ar1at -a3r2au2 -ar2a3v -ar3ba -ar1b -arb5et -ar2b2e -ar4b2i2d -arb2i -ar4bl2 -arb3li -ar4bul -ar5chet1 -ar1c2 -ar2ch -arche2 -ar1ch5o -ar5din1a -ar2d2in -ar4do2ne -ar1do -ar3en -ar2e -ar2e4n5d -ar5e2t2t4 -ar3e1v5 -ar5gh -ar1g2 -ar3gu -ar3h2 -ar1i -ar5i2ff -ar2i1f -ar4ill2 -a5r4i5net -arin4e -ar5in1i -a5rishi -aris2h -arm3er1 -arm2e -ar5mi2t -ar3nal -ar2n2 -arn1a -ar3nis -arn1i -ar3od -ar5o2i4d3 -a4r2oi -aro4mas -ar2om -aro1ma -aro4n -a5roti -ar2ot -a5ro4uc -aro4u2 -a4r3o4x -arp5e2r1s2 -arpe2 -arper1 -ar4pu -2a4r1r4 -ar2rh2 -a2r2s2 -ar2s5a2l -ar3so -art5at -art2 -ar2th -arth4e2 -ar1t2h3r -ar5t1iz -2aru -ar3um -ar5un4 -a3ry1o2 -a2r2y -a5ry2t -ar5z -as1a -as4af -asa2n2 -2asc2 -as5con1 -as2c2o2 -as5c2ot -as2cr2 -as2e -as3e2ct -4a2s2ed -ase2p4 -ash5ay -as2h -asha2 -ash5i4l -as5i2ly -a2s3in -a5s2io4 -a3s1it -a4s5iv -ask5er1 -as2k2 -aske2 -aski4 -as4l2a -a2sl4 -as4lo -2aso -as5o2ch -as2oc -a4s4o2n2ed -as4o2ne -as5or -as3ph2 -a4ss2 -assa5g2i -as1sa -assa4g -ass5i2bl2 -as4s1ib -as4sil -as3s2it5 -2asta -ast2 -as4tat -as4t2i3a2 -as3t1is -as4tit -4asto2 -as3tra -astr2 -as4tri -as1u -as4un1 -as5u4r -2a2ta -4atabi -a2t1ab -a5tal1is -at2a2m4 -at2a3p -atar3a -ata2r -ata3s -ata3t4 -at3e2au3 -atea2 -at3e2ch -at5eer1 -a5tel. -ate2l -at2e5le -at5enat -aten1a -at3ent -4ater1 -at3era -at5er1n3is -ater2n2 -atern1i -at5ern3iz -4ate4ss -a2t2es -at5et -4a2th -ath3a4 -a3then -athe2 -ath5er3in -at4her1 -ath5ero -ath5ete -athet1 -ath3i -ath3od -a5th2o4n -a1t2h5r -4a3t2i2a2 -at1i2c -at5icis -at2i1c2i2 -ati5cit -at5ic1iz -a2t2i1f2 -a4t1i4l -a4tim -a2t3in -4atin1a -at5i4ng -4at4is. 
-a2t1is -at1it -atit3u -atitud5i -4a3t2i1u2 -at4ivi -a2t1iv -a5tiv1iz -a2to -5at5od -4a1t2o1g -2atol -4aton -a3t2oo -a4to2ps2 -a5t4oria2n -at1or -ator1i -ator2i1a2 -a4to2r2y -atos4 -a5t2oz -2a2tr2 -at3ra -a4tr2e -5at5re4s4s -at1ri -a3t2r2ic5u -at3ron -at5ro4u2 -at4tag -a2t1t4 -atta4 -2a2tu -at1ul -atu4m -at3ur1a -atu4r -at3ur1g2 -4a2ty -2au2 -4au. -au1b5i -4a4uc -au5cer1 -au1c3o2 -au4d5er1 -aud4e -audic4 -aul3i -aul4t -aul5t4ed -aul1te -ault5er1 -ault5i -au3ma -aun2 -aun5ch4ie2 -aun2ch -au4n3d -aun4dr2e -aundr2 -au5reo -au4r -aur2e -aur4o -au5ror -4aus. -aus5er1 -aus5p -au4s4t4ed -a4ust2 -auste2 -aut3a2r -aut3er1 -au3th -2av -a2v4ab -ava4g -av3a4ge -ava5l2a -av5alr4 -av5ant -ava2n -av5a2r -avas3 -av3e4nd -av3er2n2 -aver1 -av3ig -av4i1ol4 -a3v2i1o2 -av1is -aw5er. -awer1 -aw5e2r1s2 -aw1i2 -aw5n2ie2 -awn1i -aw5y2 -a4x -ax2i2d -ax1i -4ay -ay5l2a -ay3m -ayn4 -a4y2s2 -ay5si -ay5sta -ays1t2 -ayth4 -2az2 -az3a2r -az1a1 -aze4 -az5ee -azy1g4 -azz4l -azz2 -2ba. -ba5b2ir4 -b1ab -3back1 -baen4 -bag4a -5b2ah -b2a4i2 -bal3a -balm5i -balm2 -ba5lon -bal5u -bam4a -b2a2m -ban4a -ba2n -ba5na2n -b4ane -5ba4ng -b4aniti -ban1i -b4a4n2s -b2a4p1 -5bar1b -ba2r -bar4d -bardi4 -bar4n2 -ba5r2om -bar3o4n -5ba2r2s2 -1bas -bas4te2 -bast2 -b4a4th4 -3bat1i2c -ba5t2io2 -b4at5on -ba2to -battle5 -ba2t1t4 -bat2tl -2b1b2 -b4b2a2ta -b3bli -b2bl2 -b4bo2n4e -bbo2 -b3bon -b1c2 -bcord4 -b1c2o2 -2b1d -bdeac5 -bdea2 -bde4b -b1di4v2 -b2e -4be. -3bea2 -4beas -be3c2a -3becu -2b2ed -be3da -bed5el -bede2 -bed2i -be4do -be5dra -bedr2 -be4du -5bee -3be1f -be3go -be5gr2 -be3gu -1bel -be3l2a -2b2ele -be3l1it -bel4t -be3m -b2e4n4d -ben2d5a -bend5er1 -bende2 -b4e1ne -be5n2i2g -ben1i -b4e5n2u -4beo -be3q -2b2er2e -ber1 -berg2a5m -ber1g2 -berl4 -5be4r1r4 -be2r5s2 -b5er2t1in -bert2 -be1s2 -2b4es. -be3sl4 -be3tr2 -be3w -2b1f -b1f2a4 -4b1h -b4ha2 -2bi. -1b2i1a2 -bi4b1 -bi1cen5 -b4ice -3b2i2d -bid5i -b4ie2 -bi4e2r1s2 -bi1er1 -b2i1f4 -bi4fid. -bifi4d -bi5ga -b2ig -bigu3 -b1il -b2ile -5biles -3b2ill2 -4bim -bi1me2t5 -bim2e -5bin1a -5b2i4n4d -bind3e2 -bin5et -b2in5i4 -1b2i2o2 -b4i1o3l -bi2o5m -bi3o4u2 -b2ip4 -b2i5q -b2ir4 -bi3r2e4 -bi5rus -b2is -5bi2s1m4 -bis4o -bi5s2ul5 -3bit2u1a2 -4bity -bi5ve -b1j -4b5k4 -2bl2 -5bl4ac -bl2a -blag4 -b3la2n -5blast2 -bl4as -bl2a5tu -blem5at -3b2ler1 -5blesp -4b3l2ik2 -blim3a -bl2i3o2 -bl2i2q -b3l1is -4b2ly -2b1m -bment4 -bm2e -b1men -bmi4 -4b1n -bo2 -4bo. -3b2oa2 -bo5a2m -5bob -bod5i -bo5h2 -2bo2i4d -b2oi -4boke -bol4e -4bo2l2ed -bol3i -bol4t -3bon -bon4c -bo2n4e -bon4ie2 -bon1i -bon3i4f -bon4sp -bo4n1s2 -1b2oo -b3orat -bor1a -bor3d -bor5ee -b2or2e -bor5et -3bor1i -bor5ic -bor5i3o2 -bor4n2 -bot3a2n -b2ot -5boti -boun5t2i -bo4u2 -b2oun1 -3bou4r -bous4 -bow2 -bow3s4 -4boxy -bo4x -5b2oy -br4 -3bra2ch -4bral -br2a2m4 -b2ra2n -bra4n4d -4b4re. -br2e -b4reas -brea4 -4b2res -brev5et -bre1v -b2ri2d -5brie1f -brie2 -bri4ng5 -bri4os -br2i3o2 -b5rist2 -b4r2oa2 -bro4ma -br2om -bros4 -brum4 -4bry. -b2r2y -4b1s2 -b3sc2 -bscon4 -bs2c2o2 -bsen4 -bserv5a2n -bser1 -bser1v2 -b5si -b2sin4 -bso2 -bsol3e -bs2ol -bso3lu -b4stac -bst2 -bstu1pe5 -bs1tu -bst2up -2b1t -b5t1let -b2tl -4bu. -5bub -buf5fer1 -buf2 -bu2ff -buf1fe -b4ul2i -b4ulos -bu1lo -bun2 -bu4n4a4 -b5u5nat -bunt4 -bur3e -bu4r -bur4ri -bu4r1r4 -busi4e2 -bu4ss2 -bus5si -3b4ust2 -bu5ta2r -b3ute -b5ut1in -but2i -3bu3t2io2 -bu2t4iv -b5ut5o -b1v -4b3w -2by -4by. -3by1i -b4y2s4 -5byt -2ca. 
-c2ab5in -c1ab -c4ace -ca1c2o3 -cad4r2 -5caf -ca3go -5c2ai2 -5c2ak -c1al -c4al2a -ca5la1ma2n -cala3ma -cal2a2m -cal5a2r -3cal1c2 -ca5le1f -c2ale -cal2l5in -call2 -cal1li -cal4m2 -c2a3ly4 -ca3ma -c2a2m -cam4i -ca5na2r -ca2n -can2a -c2an4e -c4an1o -ca3n2oe -can5ta2r -can2t2a -can5t4ed -can2te -c4an4t1ic -cant2i -can4t4r2 -5c2ao -1c2ap -ca5p2il -cap1i -ca2pt4 -cap3ti -cap3u -1ca2r -ca3ra5c -car5am2e -car2a3m -ca3ree -car2e -ca3r4i3c -car1i -car3i1f -car5m -car3n1i -car2n2 -car3ol -car5o4n -car5oo4 -ca3ro4u2 -car4v2 -cas2e5 -cashi4 -cas2h -3ca4s3s2 -cas5t2ig -cast2 -3cas1u3 -c1at -c4at. -c2a2tc2 -c4at2om -ca2to -c2a3t2r2 -c4a4t1s -c2a2t4u -3c2au2 -caulk4i -cav3i4l -c2av -3c4ay -c1c4 -ccen1t5r2 -c1cen2 -c3cent -cces4sa -c5ce4ss -c3ch -cci3d4 -c1c2i2 -cc2ip4 -cc2le3 -ccl2 -4ce. -4c4e1ab -cea2 -cea2n3 -3ceas -ce4c2i2 -2c2ed -5ceda -ce3da2r -3cede2 -3cedi -4ce1f -ce5g -3ce2iv -cel3ai2 -cel2a -cel5ib5 -5cell2 -cel5lin -cel1li -celo4 -ce5l2om -4ce2ly -2cem -ce4me2t -ce1m2e4 -3cemi -ce4mo -1cen2 -5cenc -cen5c2i2 -cen5d2ed -c2e4nd -cende2 -cend5en -cend5er1 -cen3i -2cen1n4 -3cent -cent4a -cen5t4ed -cen1te -cen5ter. -center1 -cen5te2r1s2 -cen5t2es -1cep -cept3a -ce2pt2 -cep5t4ic -3cera -cer1 -cer4b2i -cer1b -3c2erd -ce3rem -c2er2e -5cer2n2 -5ce4ss -cest5o -cest2 -ces5t4r2 -ce2t -cew4 -2ch -4ch. -4ch1ab -cha2 -3chae -3ch2ai2 -cham5per1 -ch2a4m -cham2p -chan5g2i -cha4n2g -cha2n -ch4a3pa -ch2ap -chec4 -che2 -4ch2ed -3chee -3chem -che3o2l -ch1er1 -ch4eri -5cher3in -ch4erl -4ches -3chete -chet1 -ch5eu2 -che5va -che4v4 -3chew -ch5ex -5c2hi. -3ch2i1a2 -3ch2i2c2o2 -ch1ic -ch3i2ly -chi4l -ch4in. -ch1in -ch3in1n4 -3ch2io2 -5ch2i2p -ch2izz4 -ch1iz -ch5k -5chlor -c4hl -4c2h1m -1cho -ch2o3a2 -5ch2oc -4ch2oi -ch5o2i4d -3chor -4cho2r4ed -ch2or2e -chor5ol -4choso -3ch2ot -4choti -ch5ous -ch2o4u2 -chow5 -3c2hr -chu4r4 -3chut -5chy1d4 -ch2y -3chy2l -3chy2m -1c2i2 -4ci. -4ciac -c2i1a2 -ci2a4m -ci3ca -4ci4d1s2 -ci2d -4cie. -cie2 -ci3er1 -ci3es2t2 -c2i5et -c2i3f -cifi4 -4c2ig -ci3ga -c3iga2r5 -3cil -cil5lin -cill2 -cil1li -2cim -cim3a -ci3m2e -5ci1men -4cin3ab -cin1a -4c2i4nd -c4ine5a2 -cine5mat -ci5ne4ss -cine4s -4cint -c4i3ol -c2io2 -ci5om -ci4po -c2ip -cisi4 -c2it3r2 -ck1 -cka2r5 -cka5t -c4ke -ck5i1f -ck4sc2 -c4k1s -cl2 -cla5r2i1f -cl2a -cla2r -clar1i -3cl4as -c2le2 -2c4le. -c5lec -clemat4 -c2lev3 -cli1m -c3li4ng -cl2i2q -c1lo4q -c4l4o1tr2 -cl2ot -c4lue4 -cl2yp5 -c2ly -5cl4y2s -cn2 -c3n1i -1c2o2 -4co. -3c2oa2 -c4o5ba -3c2oc -co3c2i2 -co5cu -co3dic -co3d2i1f -4co2d2y -3coe -co5et -co3gr2 -c2o1g -4c3o2i4d -c2oi -co3inc -4col. -col3a -co3l2o1g -co1lo -5col1o4u2 -co5ly -co5mas -c2om -co1ma -co4m2e -co3mo4 -com1p4 -con1 -con4ati4 -con1a -con4ch -con1c -con3d5er1 -co4nd -conde2 -con4ey -co2ne -con4ie2 -con1i -co4n3s2 -c2on3t -conta5d -3c2oo -coo2p4 -co3or -cop4e -co3ph -c4o5p2l2 -co3po -c1o2p4t2 -2cor1a -cor5d2ed -cord5er1 -4co2r4ed -c2or2e -co3rel -3cor2n2 -4coro -co5rol -5c2ort2 -3cos. -c4ost3a -cost2 -cost5er1 -coste2 -co5ta -c2ot -3c4o3tr2 -5coty -co4us5t2 -co4u2 -cov1 -co3va -cow5a -c2oz4 -co5z1i -c1q -cr2 -5craf -craft5i -cra2ft -c4ra2n -5cran1i -cr4a5n2i1u2 -cras3t2 -cras2 -cra4te -c2r2e -4crea2n -crea4 -cre3at -cre4p3 -5creti -cre4t2o -cret5or -cri3l -cron4 -crost4 -4cro4u2 -5c4rus -c2r2y2 -crym3 -cry1o3 -4c5s4 -csim5 -2ct -c2ta -c3tac -ctac5u -c5ta5g -ct1a2n -ct5ant -c5tar2i1a2 -cta2r -ctar1i -c3ta2to -c1te -c4tea2 -c2t5ee -c4tent -cter4i1a2 -c1teri -cter1 -c2t5es -ct5et -ct2ic -c5tic2i1a2 -ct2i1c2i2 -c4ti4c5s4 -ctifi4e2 -c1t2i1f2 -c3tim -ct4in. 
-ct1in -ct4in1a -ct5i4ng -c3t2in1i -c5t2in5o -c5t2io2 -c3t2is -c3tit -c4titu -c4tity -ct5ive -c2t1iv -ct4iv1i2t -ct5o1lo -c1t2om -c3ton -c5toris -ct1or -ctor1i -c5tor1iz -c1tr2 -c2tr2e -ct2r2o5t -c1tu -c2tum -c1ty -cub3at -cuba4 -c4uf2 -cu5ity -c2ui2 -cu4l5ab -cu1l2a -c2ul2i -cul2l5er1 -c4ull2 -cul2l5in -cul1li -1c2ult -cu4mi -5cu4n3a4 -cun1 -cun4e -5cun2i -5cuol -cu5pa -c2up -cu3pi -c3up2l2 -1cu4r -cur4er1 -cur2e -cur5ial -cur1i -cur2i1a2 -4cur4o -1cus -cus5a -c3u2t1iv -cut2i -c3utr2 -5cuu4 -cu5v2 -2cy. -cy4b2i -c4y1b -1cyc -cyl3 -cy4m -cy5no -cyn1 -c4y2s4 -cys5to -cys1t2 -cy4t -cz2 -4da. -d4ab1r4 -d1ab -1d2ac -da2ch4 -d5ache2 -3da2ct -d1ag -d4a4g2i -d4ale -d4al1g2 -dal5ler1 -dall2 -dam5a -d2a2m -3dam2e -d3am1i -da5mu -3da4ng -da2n -d1an4t -d3ap -d3ard -da2r -5darm -3d4as2 -d2ast5a -dast2 -d1at -da2t1iv4 -d2a2t4u -dau2gh3 -d2au2 -daun5te -daun2 -3d2av -d3b -d3c4 -d1d4 -d4d4er2e -dder1 -d3di -d3d2ler1 -d2dl4 -d3dli -d3dy1i -d2d2y -2de. -de1a2c3t -dea2 -de5aw -de4b2i -de1b -deb5it -3dec -de5cant -dec2a -deca2n -de4cil -de1c2i2 -de1cr2 -4de2ct -ded3i -d2ed -def2or5e -de1f -de1fo -de4fy. -de3g -de4gu -de3io2 -5d4e3is -de3lat -del2a -de1li4e2 -del5ler1 -dell2 -del5li -de5lo -1d4em -4de4mie2 -4dem4is -d4em4o4n -de1mo -de4mo4n1s2 -de3mor -de4mos -4de2my -de1n2a -d2e4n4d -4d4ene -d3en1h2 -den2i4e2 -den1i -dens5a -de4n2s -dens5er1 -den5tit -dent2i -de3od -deo3l -deon2 -de3ont5 -de1p -depen4 -deposi4 -de1po -de3p4os -de2p4u -d3eq -derac4 -der1 -de3r2ai2 -d4er2e -4d4e2r4ed -de5reg -3der3er1 -1deri -der3k4 -3derm -der4mi -der5min -5derne -der2n2 -3dero4 -der5os -de2r3s2 -5d2eru -4d4es. -de3sa -5de2sc2 -des4ca -de5sc1al -de3sec -des4i -de3s4i2d -des5ig1n1a -des2i4g -desi2gn2 -des1p -des5p4o2n -des2po -de3sq -d3e4st. -dest2 -de2s3ti -1de1t -de3t2es -de5th1 -de2ti -dev3i4l -de3vis -de3vi2t -de4v2oi -de1vo -devol5u -devo2l -3dex -2d5f -dfol4 -d1fo -d2g -dg4a -d1gel4 -d4gen -d3gr2 -4d1h -dh2ot4 -d4hu -4di. -1d2i1a2 -di2ad -3dia2r -di5at5om -di3at -dia2to -4d1ib -d1ic. -dic5a2m -d4i4ce -d4i3ch -d5icl2 -dic5ol -d2i2c2o2 -1di2ct -dic5tat -dic2ta -dic4te -5dicul -d2icu -d5i1cu4r -1di2d -di4e2r1s2 -die2 -di1er1 -3di3ev -d4i3fo -d2i1f -dig3al -d2ig -di3g2a2m -dil4 -5dill2 -dilo4 -d4i3lu -di5mer1 -dim2e -di1me2t4 -di1m1i -2d1in -din4e -d4in5g2i -di4ng -d4i5nos -d2ino -3d2i1o2 -di2o4c -di4ol2a -d4i1ol -di2p5t2 -d2ip -3dir2e -d2ir -di3r1i -4d5iro -di4s1 -d4i2s3c2 -d4is3en3 -3d2is2i1a2 -3d2i4s1s -d4it4as -dit1a -d4iter1 -dithe4 -d2ith -d3ito -dit1or3 -2dity -1d2i1u2 -1di1v2 -di4val -d2iva -di5vin2e -di2v1in -dix4i -d2ix -d1j -2dl4 -d1l2a -5dle1f -5dlest2 -3dlew -dlin4 -d1lo -d5lu -2d1m -4d1n2 -1do -4do. -d4ob -do4c3u -d2oc -do2g4a -d2o1g -do4j -d4ol. -dol3en -do5l4ine -dol5it -do4lon -do1lo -d4o4ls -5dom. -d2om -do1ma2n4 -do1ma -domin5 -dom1i -dom5ino -dom5i2t -do5mo -don4at -don1a -4do2ny2 -3d2oo -d2or -4dor. -d2or4m -d2ort4 -d4os -do5sim -dossi4 -do4ss -dot1a -d2ot -dot4t1in -do2t1t4 -dot3ti -2dous -do4u2 -d4own -3do4x -d1p -dr2 -d5rail -dr2ai2 -d3ral -3dr2a2m -dra2n4 -d4ras2 -drast4 -3drel -dr2e -dres4 -dres2s5o -dre4ss -dri4e2 -d4r2i1f -dr2i4g3 -d4r2om -dro1pho4 -dr2op -dru4n2k3 -drun1 -4d1s2 -d5sl4 -d2s3m4 -ds4mi -d4sw2 -dt4 -dt5ho -1du -2du. -du1at -d2ua2 -3d4uc -du4ch5 -duci5a2n -du1c2i2 -duc2i1a2 -du4c2o2 -du5eli -du5ell2 -du5en -du5e2t2t4 -due4t -du5in -d2ui2 -dul3c2 -d3ule -d4ul4l2 -dum4b2e -du2m1b -du4n4a4 -dun1 -d5un4c -d2u2p -du3p2l2 -5dur1o -du4r -d5use -dust5er1 -d4ust2 -duste2 -du3u4 -d1v -dver2 -dvert3 -dvoc5at -d1vo -dv2oc -dvoc2a -2d1w -dwell3 -2d2y -dy4ad. 
-dy1a2 -d1y5a2r -5dy4e -5dyk -dyl2 -dyll3 -5dymi -3dyn1 -dys3p -d4y2s -d3zo -ea2 -4e1ab -e1a2ct -eac4te -ea5cu -e5ad1d4 -ead3er1 -ead1i -ead3li -ea2dl4 -ea4g -e2ak1 -eal3a -ea2l3er1 -e2ale -ea3l2o1g -eam4bl2 -e2a2m -ea2m1b -eam3er1 -eam2e -ean5i -ea2n -e2ap2 -eap5er1 -e3a4p1p2 -ear3a -ea2r -ear3er1 -ear2e -ear4li -e5a4r2r4 -ear4te -eart2 -earth5i -ear2th -eas5er1 -eas2e -ea4son1i -e2aso -e1a4s1s2 -eassem4 -eas4t2 -east5i -eat5en1i -e4at3er1 -eat5ie2 -e3a2t2i1f2 -eat1it4 -eat4it3u -e3at1ri -e2a2tr2 -e4a2tu -e2au3 -eav5i -e2av -eavi4e2 -eav5o4u2 -ea1vo -eaz5i -e2az2 -e1b -eba2r4 -e2b2b2 -eb2e4 -e4bel. -e1bel -e4be4ls -e2ben -eb5et -eb2i -e5b1il -e4bin -e4b2is -e4bl2 -e4bos -ebo2 -ebo1t3o -eb2ot -e2br4 -eb1ra -e2b2t -e4b4uc -ebus5i -ec2a -ec3ade -ecad5en -ec2al5e -ec1al -e5c2a2m -e4ca1po -e1c2ap -ec3at -ec5a2th -e1ce -ecen2t5o -e1cen2 -e3cent -ech3i -e2ch -e4cib -e1c2i2 -ec2i4f -ecip5i -ec2ip -e1cl2 -ec3l2ip -econ4s2c2 -e1c2o2 -econ1 -eco4n3s2 -econstit5 -econ3s2t2 -e2c3or1a -e4c5oro -ec3rat -ecr2 -e4c5rea2n -ec2r2e -ecrea4 -e4crem -ec1ro -ect5ati -e2ct -ec2ta -ec4ter1 -ec1te -ect2i4c -ec4tit -ec4t5us -ec1tu -ec1ul -e5c2ul2i -2ed -e5da4n2s -eda2n -e2d1at -ede2 -2e4d2ed -e5de1h2 -e4d2ele -edes3t2 -ede3te -e1de1t -edeter5 -e3dev -e5dew -ed4g -edi4a4ls -e1d2i1a2 -ed5i4c1al -ed5i4c5s4 -ediges4 -ed2ig -edi1ge -ed5i1gr2 -ed3im2e -ed1it -e1di2v2 -ediv5i2d -ed3li -e2dl4 -ed2or4 -e1do -e4do4x -ed1ro -edr2 -edu5cer1 -e1du -e3d4uc -e2dul -ed3u1lo -e4d5u4r -ee4ce -eed3er1 -e2ed -eede2 -ee4do -ee2f -ee5g -ee1i -ee2l1i -ee2m -eem5er1 -ee1m2e4 -eem3i -eep1 -ee4pa -eer4in4e -eer1 -eer3in -eesi4 -ee3to -e1f -efac2t5o -e1f2a -efa2ct2 -efal4 -ef5er3ee -efer1 -ef2er2e -ef5ini4te -e2fin -ef2in1i -e4fite -ef4l2 -efor5est2 -e1fo -ef2or2e -2e3fu -e4fug -efut5a -e1gel3 -eg2i5a2 -e1g2i -e4gib -e3gl2a -e2gl2 -eg3le -eg4mi -e2g1m4 -eg5n1ab -e2gn2 -eg1n1a -e5g4on -e2gr2 -e5gu4r -e1h2 -e5ho -e2h5s -eh2y2 -ehyd5r2 -ehy1d4 -ei2d4 -5ei1do -4e2i1f -e2ig2 -e5ignit -ei2gn2 -eig1n1i -e4in. -e3inc -e2ine -e1i4ng -e2in5i -e4ins. -ei4n1s2 -e2i4p4 -eir3o -e2ir -4eis -eis3i -eit5er1 -e2ith4 -e2iv -eiv3er1 -e2iz -e1j -ejudic4 -eju1di -ek3en -ek5is4 -ek4l2 -e4l4ac -el2a -e5lad -el5age -el2a2m4 -el5anc -ela2n -elast3 -el4as -e4la2t2es -ela4te -el5at3ive -ela2t1iv -elch5er1 -el1c2 -elch4e2 -el2ch -eld3er1 -2ele -elea5g -elea2 -4e4l2ed -el5en1i -el3en3o -ele3o -ele5ph1 -e2l1er1 -e1les -e5le4s4s -e4leste2 -elest2 -el3et3o -e1let -el3ev3a -e2lev -ele3vi -el5ex -e4l3ica4 -e1lie2 -eli4e2r1s2 -eli1er1 -e3lim -el3i4ng -eli3on -el2io2 -e4l1i4s -el2i2t4t4 -el1it -e3l4iv -el4l1ab -ell2 -el1l2a -ell5iz -el1li -e3l2oa2 -e3l2oc -elo5c2a -eloc3u -elo4di -e2l2o1g -elom5ate -el2om -elo1ma -elo3mat -el5op. -el5o2ps2 -elp5in -el3so -e4ls -el5tie2 -e1lu -elu4m -elus4 -elv4 -e5ly1i -e2ly -3elyt -e2m3ago4 -em3an3a -e1ma2n -e1ma2r4 -emarc5a -emar1c2 -em5at1iz -em2at5ol -ema2to -em5b2i -e2m1b -e1m2e4 -e4mee -e4mel -e3me2m -e4m3era -emer1 -em5ero -emet4e -e1me2t -em4icis -em1ic -emi1c2i2 -e4mie2 -e2m2ig -emig5ra -emi2gr2 -em3in1a -em5i4ng -e3m2i3o2 -em3i2s1m4 -e4mit1a -emi2t -e4m2i1u2 -em4mae -e2m1m2 -4emnit -e4m1n2 -emn1i -emo3b2i -e1mo -emo2d4u -emod1 -e2m2o1g -e4m2oi -em3o1lo -em5o1m -4em4on -e3mon1i -emon5ol -emo4no -e2mor -em5oris -emor1i -em3o4r1r4 -e4mot1ic -em2ot -e5m2oz -em1pa5r -em1p -em3pa -empara5 -em5pes -4emp4li. -emp2l2 -em4pr2e -em1pr2 -em3um -e5mut -en3a2c -en1a -e4nal -en3a1m3o -e1n2a2m -en4an1n4 -ena2n -e2n3a2r -en3as. 
-en2as -ena5tur2e -en2a2tu -enatu4r -3en1cep -en4cile -en1c2i2 -en3cil -en2c1t4 -2e4nd -en4d5al -en2da -en4de2dl4 -ende2 -en1d2ed -end5rit -endr2 -4ene -e2n2e5d -en3ee -e5nelle -enell2 -e5ne2p -e2n1er1 -e5nereo -en2er2e -ener5v2 -en5esi -e3ne4ss -en1et -en4e2t2t4 -e2n3eu -e3n4ew -en3g2i -e4ng -en3ic -en1i -en5i1er1 -en2ie2 -en3i1g3r2 -en2i2g -en5in -enit5u -e4n3k -en1o -en3oi -en2o2m -en3oty -en2ot -enov3 -e4n2s -ens5a2l -en3sp -en4s4u2m -en3su -en4sus -ent3a2r -en4te2r1s2 -en1te -enter1 -en5t2i2a2 -ent2i -en4ti3fy -en1t2i1f2 -en2t2o -en4tri -en1tr2 -ent5rin -ent5up -en1tu -en4tus -4en2u -en3u1a2 -en3uf2 -en3u4r -en5ut -5enwa -en5w -eo3b -e4o2ch -e2oc -e4oda -eof2 -eo2l -eol5ar. -eol2a -eola2r -eol5at -eolo3g2i4 -eo1lo -eo1l2o1g -e5olu -e2o3m -eon4a -e3ont -e1o2p4t2 -e1or1 -eor4de -e2or3e -eor5o -eo1s2 -eo4t2o -e2ot -e1pa -ep4al -ep5ar1c2 -e1pa2r -epa4t -epend5en -ep2e4nd -epende2 -ep5ert2 -eper1 -e4pete -e3pet -epe5ti1t2io2 -epe2ti -e4p5ex -eph1 -eph4i -e2p2ig -e5pl2a -ep2l2 -ep3lic -epol3a -e1po -epol3i -epol2it5 -ep3re1h2 -e1pr2 -epr2e -ep3res5e -e4p5ri4m -e4p5rob5 -ept3or -e2pt2 -e1p4u -e3pu4r5 -e4puta -equin4 -equ4 -eq2ui2 -equ2i5no -er1 -era4cie2 -er2a1c2i2 -era4do -er2ad -era4g -era4l -er3aph -er2ap -er3ap1i -er3a2p4y -4era4ti. -4era4tim -er5a2tu -er3bat -er1b -er2ba -er3b2e -er2b5os -erbo2 -2er1c2 -er3ch -er3cl2 -2erd -er2d5a2r -er4di4e2 -2er2e -er3eal -erea4 -4e2r4ed -er3e2gr2 -er5el. -er5ell2 -er5e4ls -e4re1m2e4 -er3en -5er2e4nd -er4en4e2 -ere5o2l -e3re1q -er3er1 -ere4s -er5ese -er3esi -er5este2 -erest2 -er5e2sti -eres5t4r2 -eret4 -er3et. -er3e4t1s -er3e2t2t4 -ere4v -er3ex -ergi3v -er1g2 -er1g2i -er3gl2 -er3ia. -er2i1a2 -er4ia2n -eri4ci2d -eri1c2i2 -5er5ick1 -er2i2d -er3ie2 -er3i2ff -er2i1f -er4i1me2t -erim2e -er3in -eri4n1a -eri4on -er2i3o2 -er3io4u2 -er4i2s4c2 -er4i5sta -erist2 -4eri2t -e3r2i4v -er5iz -4er1j -er2k4 -er3m2e -er4m2oi -er1mo -5ernacl2 -er2n2 -er3na2c -ern1a -er5nal1is -er1n3er1 -er1n3is -ern1i -ern3it -4e4ro. -er3o2i4d3 -e4r2oi -er4o5is -ero5st2 -erpent5in -erpe2 -er3pent -erpent2i -erre5l2a -e4r1r4 -erre2l -err2e -er4rep -er5s2ine -e2r1s2 -er2sin -er5t4ed -ert2 -er4ter1 -ert5er. -ert5e2r1s2 -er4th2i -er2t5iz -2eru -eru4b -er2u5d -eru4n2d5 -erun1 -er4vi4l -er1v2 -5erw2au2 -er1w -eryth3 -e2r2y -ery2t -2er2z -4es. -es5a4m -es5a2n -e2sc2 -es5ca2n -es1ca -es5che2 -es2ch2 -esci5e2 -es1c2i2 -escut5 -e2s1cu -e3se2a2 -e3se2ct -e5see -e5seg5 -ese4l -es5enc -e3sh4a2 -es2h -e1shi -e5shu4 -esi4a2n -es2i1a2 -es5ic. -e5s2ick1 -es5id3en -es4i2d -esi4de -esi5d2i1u2 -es5ies -esie2 -es3im -e2s3in -e5sion -e1s2io4 -e4s1it -es4it. -es4i4t1s -e3sk1in -es2k2 -e3s4mi -e2s1m4 -e2s4od -es3ol3a -es2ol -es3ol3u -es3on1a -es2o3p -e1sor -es3per3 -es5pir1a -esp2ir -es5pit -es4p2l2 -es3plen5 -esple2 -es5p2ot -es2po -e5s2pr2 -es4s3a2n -e4ss -es1sa -essa2r5 -ess5ee -es4sil -es2so -es2t1a4b2 -est2 -est3a2n -e5sta2r -es5t2au2 -e2sti -est5ifi -es1t2i1f2 -est5igati -est2ig -estig1at -e3st2oc -es5t2oo -est4r2 -es4tud4 -es1tu -e1su -e2s3ul -es4u4r5 -et2a -et3al. 
-et5all1is -etall2 -etal1li -et3al5o4 -eta5m2e -et2a2m -et2a3p -et3ar1i -eta2r -et5a2r2y -et4as -et3ate -et3ati -et5ay -et3eer1 -etel1l5i -ete2l -e1tell2 -etend5er1 -et2e4nd -etende2 -et5en1i -eter2 -et3er3a -et5er3i1a2 -e1teri -e3tex4 -e2th1 -ethy2l3 -eth2y -2e1t2i2a2 -e3t2ic1u -et1ic -e3t4i4g2i -et2ig -e5tim -et3in -eti4n1a -e3t2ir -et5i2t3iv -e3t2i4u2 -et5o1lo -e5tomet1e -et2om -etom2e -eto1me2t -e2ton -et3on1a -etor3i -et1or -etra5g -etr2 -4e4tral -etr2a5m -et4ra2n -et5re4s4s -etr2e -et1ri -et4r2i1a2 -etrib5a -et2r2ib -e4trim -et1ro -e2t2t4 -et3ter1 -etud4 -et3ud4e -e4tum -et4we -et1w -e2t5z2 -eu3d2i3o5 -eue4 -euk5 -4eum -e3ur1g2 -eu4r -eur5i -eus4 -eu5ten -eu3ter1 -eut3i -ev4abi -e2v3ab -e1v2al5e -ev2a2p3 -ev3ast2 -ev3at -ev5eli -e2vel -eve4n -ev5erat -ever1 -ev5er3en -e4v4er2e4 -ever4er1 -e4veri -e4ves -e1v2i1a2 -e4vi1ab -e2vic2 -evic1tu4 -e5vi2ct2 -evi2d3 -ev5ig -ev4ile -evi4l -ev5is2h -evi2s5in -evis5o -e4v2i1u2 -ev2oc3 -e1vo -evol5e -evo2l -evol5ute -evu4 -e1wa -e4wag -e5w4ay -ew1er1 -e3wh2 -ew5ie2 -ewi2 -ew1in -ew5is2h -e3wit -e1wr -ex5i4c -ex1i -ex4on. -ex1o -exo4n -1ex3p -4ey. -ey4as -ey1a2 -eyl4 -e4y3s2 -ez5er. -ezer1 -ez5e2r1s2 -ez5ie2 -ez1i -1f2a -2fa. -fab4i -f1ab -fa3ce2t -fa2ct2 -fa2c3u -2f3ag -fal2l5in -fall2 -fal1li -5falo -fa5lon -fals5ifie2 -fa4ls -fals2i1f4 -4fan3a -fa2n -fan5tas1iz -fan2t2a -fantas3i -fant3i -5fa2r -far3i -5faw -4f5b -2f5d -2fe. -3feas -fea2 -fe4a3tu -fe2b5r4 -fe1b -3fec -2f2ed1 -5fei -fe1li -fem3i -femin5 -fend5er1 -f2e4nd -fende2 -f5en1i -4f4e2r4ed -fer1 -f2er2e -fer3ee -3fero -fe5r2oc -fer5om -3fe4r1r4 -fer3v2 -2f4es. -fes2s3o -fe4ss -fest3a -fest2 -fe2st5i -fe4t -fet4al -fet2a -fet4in -fet4o -3feu -fe5veri -fever1 -2ff -f1fe -ffec4te -f3fec -ffe2ct -f5fe4t -f1fi -f5f2i1a2 -f3fic -f5fie2 -f1fi2l3 -f2f3is -ff4le -ffl2 -ff3lin4 -f3f2oc3 -f1fo -ffon1i4 -ffo2n -ff2or3e -f3fr2 -ffran2ch5 -ffra2n -4f5h -fi5ance -f2i1a2 -fia2n -f4ib5u -4fic. -4fi4c1al -3fi1c2i2 -4fi4c5s4 -fi5del -fi2d -fid3en -fiel4 -fie2 -fi2er4c2 -fi1er1 -figh2t5 -f2ig -fi2gh -1fi2l -2fin -fin2a -fi3na4l -f2i4nd3 -fin2e -f1i4ng -5finin -f2in1i -fin4n1i -f2in1n4 -fir2m1 -f2ir -f3it1a -f5it3ee -fl2 -3fl2a -fle2s -f3lica -flin4 -3flo -flo5ric -flor1i -3flu -flu1m4i -1fo -4fo. -3f2oc -fo2e -foet3i4 -fo1et -fo1l4i -fo4li2e2 -fomen4t4 -f2om -fom2e -fo1men -fo2n -fon4de2 -fo4nd -3f2oo -fo5r2a2m4 -for1a -for5ay -for5b -for4di -fore3t -f2or2e -5f2orm -for4m3a -fortu5n4a4 -f2ort2 -for1tu -for3tun1 -fo3v -1fr2 -frag5a -fran2t4 -fra2n -fra2r4 -frat2ch4 -fra2tc2 -fre4s -fr2e -fros4t5i -frost2 -fr4uc4 -2f3s -fs4p -2ft -f1t4ed -f4ter. -fter1 -f2t5es -ft2i4et -ftie2 -ft4ine -ft1in -3fu -4fu. -f4u4c -fuel5li -fuell2 -fug4a -fu4min -fu1mi -fu4n2g -fun1 -4fu2r4ed -fu4r -fur2e -fur3n2 -fu3sil5 -fus5o -fu5til -fut2i -4ga. -ga4cie2 -g2a1c2i2 -gadi4 -ga4d4os -ga2do -3gag -3g2ai2 -3g2ale -ga5len -gal2i4a2 -gal5ler1 -gall2 -3galo -gam4bl2 -g2a2m -ga2m1b -gan5at -ga2n -gan2a -4ga2n2ed -gang5er1 -ga4ng -g5ant. -gan4t4r2 -g5an4t1s -g5ar1c2 -ga2r -g4ar2e -gar3ee -gariz4a1 -gar1i -gar1iz -ga5r2ot -gar5p -5g2a4r1r4 -1ga4s -gas5i -g2as3o -gas2ol5 -gas2s5in -ga4ss2 -gast3r2 -gast2 -g1at -g4at. -ga2t5iv -g4a2to. -ga2to -g4atos4 -g4a2t1t4 -g2a2t5u -gaud5 -g2au2 -ga5z1a1 -g2az2 -g1b -g5d4 -2ge. -5geal -gea2 -3gea2n -2g2e4d -3gedi -5ge4d1n2 -4ge1f -1gel -4g2ele -ge4li -gel4in -gel5li -gell2 -ge4lu -2ge2ly -gem3i -5ge1mo -3gen -gen4du -g2e4nd -gen5it -gen1i -gen3o -gen5t2i -ge4o -geo3lo -geo2l -4g2er2e -ger1 -3germ4 -2g4es. 
-5ge4ss -gest5at -gest2 -3get -get3a -2g1f -2g1g -gg4a -g2ge -g5ge2dl4 -g2g2e4d -g3ger1 -g5ger3er1 -g4g2er2e -gg2i4a5 -g1g2i -g3gli -g2gl2 -g3glu3 -g5g2ly -ggr2av3 -g1gr2 -g4g4ro -2gh -g5h2ai2 -gha2 -gh5en1i -ghe2 -g3ho -g4hos -gh2t -1g2i -4gi. -gi4all2 -g2i1a2 -gi4at -3gib -g2i5c2o2 -g2i4g -gi5ga2n -1g4in5g2i -gi4ng -3g2io2 -gi4or -gi4ot -5g2ip -gi5pa -g4i4s -5gis. -gi2t1 -5gitu -giv5en. -2gl2 -g3la2r -gl2a -5glass. -gl4as -gla4ss2 -glec4 -3g2ler1 -g4leto -g1let -g4letr2 -g4ley -gli5on -gl2io2 -g5l1is4 -3glo -4g5lod -gl2om3 -4glop -3glu -glu5te -glu5t2i -3gl2yp2 -g2ly -2g1m4 -2gn2 -g1n1a -g4n1ab -g5nate -5gn4a2th -g5nati4 -gna5tu4r -gn2a2tu -gn5e2dl4 -g2n2ed -gn5ee -gn3er1 -g1n1i -g4n2i1a2 -g2n3in -gn4in. -g4n2i2o2 -g2no -5gnor1i -gno4s -2go. -5g2oa2 -3g2oc -5god -3goe -go4et -go4ge -g2o1g -4go3gr2a2m -go1gr2 -g5o2i4d -g2oi -g4o3is -go2m2e -g2om -5gon1n4 -go5n2om -3g2oo -goph4 -4gor. -5gor1g2 -4go2r1s2 -g4o2r2y -3gos -gos4t2 -2go4u2 -gour4i -gou4r -g1ous -gov1 -g3p -1gr2 -gr1ab4 -3gr2a2m -4gram2e -gr2a2p -g4r2e -gril4 -gri2m3a -g4ro -gr2o4g -g5ron -gr2op4 -3gru -gru3en -gr2u5i2 -gru2m4b -2g1s -gs4c2 -gs4t2 -g4sti -gth5en1i -gthe2 -g5to -g4u2a2 -gu5ab -5gua2n -3guar2d -gua2r -g5uat -2gue -5gueu -5guit4 -g2ui2 -gui5t1a -gu2ma -gu4mi -3gun1 -g4uras5 -gu4r -gur1a -g4u2r4ed -gur2e -gur4n2 -gur4u -4gu2r2y -gust5a -g4ust2 -2g1w -2gy -g4y2b -5gym -3gyn1 -gyn5o -g5z2 -ha2 -4ha. -h4ac -hadi4e2 -had4in4e -ha2d1in -hae3o -ha2g2e4d5 -ha3g2i3o2 -ha1g2i -hag5u -ha5ic -h2ai2 -hais4 -hak4ine -h2ak -hak1in -hal5ant -hal2a -hala2n -h2a4m -ha1m5a2n -han4cro -ha2n -han1cr2 -ha4n2g -h1an1i4 -h5an1iz -han4t -han2t3a -ha4pe -h2ap -hap3l2 -har1a -ha2r -har5b -har4d -har5die2 -har2ge4 -har1g2 -ha5ri2s1m4 -har1i -har3o -har4t4ed -hart2 -har4ti -has4te2 -hast2 -ha2t5o -haugh2t5 -h2au2 -hau2gh -ha2vel4 -h2av -hav5ersi -haver1 -have2r1s2 -ha1v5o -h1b -h1c -h1d -hdeac5 -hdea2 -h1du4 -he2 -4he. -h2ea2 -1head -3hea2r -hear2ch4 -hear1c2 -heas4t5 -heav5en -he2av -he2c3t4 -he5del -h2ed -hede2 -he3do -heek4 -h4ei -h4e3is -he5lat -hel2a -h5elin -he3l2io2 -he5l2i1u2 -hel4li -hell2 -h3el3o -hem1a -he3men -he1m2e4 -hemis4 -he5m4op -he1mo -hem4p -hende5 -h2e4nd -he3or1 -hep1 -h1er. -her1 -her4as2 -her2b -her2b3a -herb3i -here3a4 -h2er2e -here3o -h5er3e2t2t4 -heret4 -h5erh2 -her5ial -her2i1a2 -h5erin4e -her3in -h1erl -her5om -h4eron -h1e2r1s2 -h5erwa -her1w -hes3t4r2 -hest2 -het1 -h4et3a -het3i -het4t4ed -he2t2t4 -heu2 -h4eum3 -heumat5 -heu1ma -he4v4 -hev5i -hex5o -h1f -h5h -2hi. -hi4a2r -h2i1a2 -h1ic -hi3c4a2n -h4i4cin -hi1c2i2 -h4icl2 -h5ie. -hie2 -h1i1er1 -h4i4e2r1s2 -h1ies -h3ifi4 -h2i1f -h3i3fy -hig4o -h2ig -hi5ka -h2ik2 -hi4l -hi5ma4 -hi5mer1 -him2e -himos4 -hi2mo -h1in -h2i4n4d -h2in2e -hi5n2ie2 -h2in1i -h5in1iz -hi5nop -h2ino -h2i4n1s2 -hio5lo -h2io2 -h4i1ol -h4i1or -h2i2p -hip3l2 -h4ir -hi4r4r4 -hir3r5i -hit4a -h2iv5a -4hl -h3l2a -h1le -h3let -h1l2i -hl2i4a2 -2h1m -h4man3ic4 -h1ma2n -hman1i -h5mica -hm1ic -2h1n2 -hno1cen5 -hn2oc -hn4o3ce2 -4ho. -ho3a2n -h2oa2 -ho4c2o2 -h2oc -ho3don -ho2do -ho5du -ho5ep5 -hol3a2r -hol2a -hold1 -hol4is. -hol1is4 -ho5l4y2s -ho2ly -ho4mag -h2om -ho1ma -hom5in -hom1i -h2o4n -hon5em -ho2ne -ho5neu -hon3ey -hon2g3i -ho4ng -ho5n2io2 -hon1i -hon1o -1hood -h2oo -hoo5r -h4ope -ho2p5r2 -h4op4te -h1o2p1t2 -hor5et -h2or2e -h4or2n2 -horn5i -ho5r2o1g -hort5h -h2ort2 -hosi4 -ho4ton -h2ot -ho1t2o -h2o4u2 -3h2ouse3 -4h1p -2hr -hras5eo -hras2 -hr2as2e -hr2e4 -hre5ma -hr5er1 -hres4 -hri4 -hril2l5in -hrill2 -hril1li -hrim4 -h5rit -h3r2od -hrom4i -hr2om -h2r2y4 -h3rym3 -2h1s -hsi4 -h4s2k2 -ht5ag -ht5ee -ht3en. 
-ht5e2n1er1 -h4t4ene -ht3en1i -ht3e4n2s -ht5e2o -h2t5es -ht4f2oo -h2t3f -ht1fo -h1th -ht4ine -ht1in -hu4g -hu4mat -hu1ma -hu5mer1 -hum2e -hu4min -hu1mi -hun4c -hun1 -hu4n2k4 -hun4t -hur3i -hu4r -hu3s2i1a2 -huz4 -h1w -h4wart2 -hwa2r -h2y -hy2l -hyl5en -hyle2 -hy2m -hyn4 -hy3o2 -hyol5i -hy1pe -h2yp -hy3ph -hyr4 -hys3te2 -h4y2s -hys1t2 -hy4t -2i1a2 -ia4bl2 -i1ab -iab5ol1is4 -iabo2 -iabol3i -iab5ol1iz -i2a2ch -ia1c3o2 -i2ac2r2 -ia5cri -ia5d4em -i5ae -iaf4 -i2ag4 -i4a3g1n2 -i5a4g5o -ia3gr2 -i3ah -i5ai2 -iale2ct4 -i2ale -ia3lec -i3al1it -ial5li -iall2 -4ial1n4 -i2a3lo -i2a5ly4 -i5a2m1b -i2a2m -ia3m2e -ian2ch5 -ia2n -i3ant -i5ape -i2ap -ia3ph -i2ard -ia2r -4iarit -iar1i -i3at -ia5the2 -i4a2th -i5at2om -ia2to -i2a2t4u -iat3ur4a -iatu4r -i3au2 -i2av4 -ib3era -ib2e -iber1 -ib1i -i1b2i2o4 -ibios4 -ib5li -i2bl2 -4ibo2 -i4bon -ibor4 -i4bose -i5bo4u2 -ib1ri -ibr4 -4ibu -ib3uta -ic3ac -ic5a2do -i4c1al -ic1a2n -2i1ca2r -iccu4 -ic1c4 -4ice -i5ceo -4i2ch -ich4i -ich5i4ng -ich1in -ich5ol -i1cho -4icin -i1c2i2 -i5c2io2 -2ick1 -ic4lo -icl2 -2i2c2o2 -i3c2o3c -ic5ol3a -icon3o -icon1 -i5cop -icoty3le5 -i5coty -ic2ot -2i1cr2 -i4cri -i4cru -i4c2r2y2 -ic4te2dl4 -i2ct -ic1te -ict4ed -ic4ter1 -ict5ic -2icu -icu4lu -ic3um -i5cun4 -i5cut -2i1cy -i2d -id1a -i5d4ay -i1d4e4m -id3enc -id3era -ider1 -i3derm5 -i3d2icu -id3i1f -i5d2ig -i5dil4 -i3dim -id4ine4s -i2d1in -idin4e -idios4 -i3d2i1o2 -id2ir4 -id1i4s4 -id4ist2 -2i4d1it -i1di4v2 -id3li -i2dl4 -id3ol -i1do -idol3a -4idom1i -id2om -id3ow -4idr2 -id5ri -id3ul -i1du -ie2 -4iec -2ieg2 -ie3ga -ie5i -i5ell2 -4iem -2i1en -i2e4n2d -i1er1 -i3ere4s -i2er2e -i2eri -ier3i4n -4ier2n2 -ier2o -i4ert2 -i3e2sc2 -ies3e4l -i1es2t2 -i3e4st. -2i1et -i4et. -ie2t3ie2 -4ieu -i5eut3i -iev3a -iev3er1 -ie1v3o -2i1f -i2fe -if4f2a -i2ff -iff5ler1 -iff4le -iffl2 -i4f3ic. -i4fic3ac -i4f5i4c5s4 -ifi4d -i2fi4n -4i2fl2 -i3fo -i3f2oc5 -if5tee -i2ft -i3fy -2ig -i3gad -ig3a4nd -iga2n -3iga2r -i1ge -i3ger1 -ight5er. -i2gh -igh2t -ighter1 -ight5e2r1s2 -4i1g2i -ign5iz -i2gn2 -ig1n1i -ign2o5m -ig2no -i3gon -ig1or -ig3ot -i5gret -i1gr2 -ig4r2e -i4g5ro -i5gu5it4 -ig2ui2 -ig1u4r -2i1h -ih2y4 -2ii -i5in -i1ja4 -4iju -2ik2 -ik5a2n -ike4b -i2l3a -ila4g -ila5te2l -ila4te -i5l4ater1 -il4a4x -il5dr2 -il4du -i3len -ilesi4 -il3f -il3ia. -il2i1a2 -il3ia2r -ili4arl -i3li1c2i2 -i5l2i1en -ilie2 -ili4er1 -il4i4fe -il2i1f -il4ific -il1in -il5i2ne. -il4ine -4ili3o4u2 -il2io2 -il5i4p1p2 -il2ip -il5i1q -il4ite -il1it -ilit5u -il4mo -ilm2 -i5lon -il3o4u2 -ilth4 -il2tr2 -4ilu -il5ul -i5lum -il5ur2e -ilu4r -il3v -4ilym2 -i2ly -ima4c -im2ag -im3a4ge -im1al -i2m5a2m -i5m2as -i4mat4ed -i4ma2t3in -im2a2t5u -im1i -i3m2ie2 -im4ine -im5ino -im5me2s -i2m1m2 -imm2e -i2mo -i5m2o1g -i3m4on -im5oo -i3mos. -impar5a -im1p -im3pa -im1pa2r -impar2ad5 -im5p2ie2 -im2pi -imp2o2t5 -im3po -im5pr2 -im3pu4 -im1ul -im5um -in3ab -in1a -4inace -ina2c -in4a2do -in5a2gl2 -in3a2ir -in2ai2 -ina4l -4inal1it -i1n5a2m -in3a2n -in3ap -in4a2r2s2 -ina2r -i3nas. -in2as -4in2a2ta -inat1or5 -ina2to -in3au2 -in4aw -2inc -inc4t2u1a2 -in2c1t -inc1tu -2i4nd -in5da2r -in2da -inde5p -inde2 -inde4s5 -in1de3t -indeterm5 -indeter2 -in5dro -indr2 -4inea2 -4i2n2ed -in5ee -in5eg3a -4in5eo -ine4s -in3esi -ine5te -4ineu -inev5 -infilt5 -in3f -in1fi2l -infol4 -in1fo4 -4in3fu -4ing2a -i4ng -in5gal -4inge -ing5ha2 -in2gh4 -4in2g2i -4ingle -in2gl2 -4ingli -4in1go -4in1gu -in2g3um -2in1i -in5ia. -in2i1a2 -4inic -in4i1c2i2 -in3ion -in2io2 -in4itud -4i4n2k -ink4ine -ink1in -4i4n1l2 -2in1n4 -2ino -4i4no. -in3oi -i5nole -4inos -i3n4os. 
-in5ose -in3osi -4in1q -i4n1s2 -in4s2ch5 -ins2c2 -inse2 -inse2ct5 -in5sec -insec5u -in3si -5ins2k2 -insolv5 -ins2ol -in4tee -in1te -int5e4ss -in2t2es -in3til -int2i -int5res -in1tr2 -intr2e -intu5m -in1tu -2in2u -in5ul -in5um -in3un1 -in3u4r -invol5u -in3v2 -in1vo -invo2l -2io2 -io3a2ct4 -i2oa2 -i1od -iod3i4 -io2d5o -ioe4 -io3gr2 -i2o1g -4i1ol -io3ma -i2om -i4oman1i -io1ma2n -io3mo -i5ope -io3ph -i5o1po -io2p4s2 -i1or -ior2a4m4 -ior1a -4i2or2e -4iorit -ior1i -5ior1iz -4iorl -ior4n2 -io3sc2 -i3ose -i3osi -i4oso -i4o5sta -iost2 -i3ot -iot4a -i4o5th -iot5ic -i4o5tr2 -i4oty -i4our. -io4u2 -iou4r -i4ou2r2s2 -i5o4x -2ip -ip3al -ip2ap4 -ipar3o -i1pa2r -ipart5ite -ipart2 -ip1at -i3p2e4nd -i1ph2e4 -iphen3 -i5pheri -ipher1 -iphi4 -i4phu -ip3i2d -i5p2il -ip3in -ip4in4e -ip2ir4 -ip5is -ip1i4t -ip4iti -ip3lin -ip2l2 -ip3lo -i3po -i4p2o1g -i4poli -i4p2om -ip4o2n3 -i4pow -ip2p2l2 -i4p1p2 -ip3pli -ip4r2e -i1pr2 -ip5tor1i -i2pt2 -ipt1or -ip1ul -i5put -i2p4y4 -2iq -i3q2ua2 -iqu4 -2ir -ir1a -ir4abi -ir1ab -ira4c -ir4ae. -ir4ag -ir4al1in -ir4al1li -irall2 -i5r2a3so -iras2 -irassi4 -ira4ss2 -ir4ay4 -ird3i -ire3a4 -ir2e -ir3ec -ir5ee -irel4 -ire5li -ires4 -ir5e4ss -ir1i -ir2i4d -ir4im -ir4is. -5ir1iz -irl5i4ng -ir5o2ch -ir2oc -ir5ol -ir3om -ir4q -i2r2s2 -ir5ta -irt2 -ir5tee -irwo4m2e -ir1w -ir1wo -ir3w2om -i4sa -is5ad -is3age -isa4g -is1a2l -is3a4m -is1a2n -is3a2r -is5av -4i4s3b -i2s3c2 -is5chi -is2ch2 -isci5c -is1c2i2 -4i1sec -ise5cr2 -is3ell2 -4is3en -is2er1 -is5er2e -i2s3et -4iseu -is3ha2r -is2h -isha2 -ish5ee -ishe2 -4ish3io2 -ish3op -is5hor -2is2i1a2 -is5ic -is3ie2 -4isim -is3inc -i2sin -4is1is2 -is4ke2 -is2k2 -i2s1l4 -islun4 -2isma -i2s1m4 -is1on -is5on1er1 -is4o2ne -is2o5p -is1p -i3s2ph2 -5is1pr2 -2i4s1s -iss5ad -is1sa -is4sa2l -is5sa2n -is4s4iv -is1s4o -4ista -ist2 -is4tal -ist5enc -iste2 -ist5ent -is5ter3er1 -i4s2t2er2e4 -ister1 -4is1th -is4t3ic -4i4s2tl -i4s1to -4is4t2om -is1tr2 -3is2t4r2y -4is4ty -i5s2ul -is3u4r -2is2y -it1a -i2t5ab -ita4c -4i1t2ai2 -it3a2m -it4an2a -ita2n -it4as -it3at -i3te2ct -it3ee -it3enc -it3ent -it3era -iter1 -2ith -itha5l -itha4 -ith5i -i5thol -i1t2h3r -ith2y5 -2i1t2i2a2 -it2i4c2o2 -it1ic -it5ic1u -it1ie2 -it3ig -4i1tim -it4in. -it1in -i4t4i4n1s2 -4itio. -i1t2io2 -4itio2ne -i5t2i4q -4i5tit -i2t3iv -it4li -i2tl -it5lo -4i2to. -it5ol -2iton -it1o4u2 -2itr2 -it5re4s4s -itr2e -i4tric -2i2t1t4 -it4tit -it3ti -itu4a4ls -it2u1a2 -itu1al -it5ua2r -4itue -it1ul -it1u4r -it3us -2i1u2 -i3um -iur5e -iu4r -2iva -iv5anc -iva2n -iv1at -i4v2ed -iv5el. -i2vel -iv5el3i4ng -iv5e4ls -i4ver. -iver1 -iv3eri -i4vers. -ive2r1s2 -iver5sa2l -ives4 -iv3et -i4vie2 -iv3i1f -i5vil1it -ivi4l -ivil3i -5ivi4st. -i2v5ist2 -5ivi2s4t3s -iv1i2t -i2vo -iv2oc3 -i5v2or2e -2i1w -2ix -ix3o -i5ye -1iz -4iz2ah -iz1a1 -iz3i2 -2izo -iz5oi -2izz2 -1ja -2ja. -3jac -ja2c5o2 -jac3u -jag5u -jal4 -ja5lo -ja5pa2n -j2ap -j4apa -jel5l2a -jell2 -jeo2 -jeop3 -4jes -jeu4 -jew3 -2ji -3j2ig -jil4 -jill5 -5jis. -3jo2 -4jo. -jo1c5o2 -j2oc -joc5u -jol4e -4jr -4js -ju1di -j2ui4 -ju5l -ju3n2i -jun1 -ju2s1cu4 -j2usc2 -jut3a -ju1v2 -k4abi -k1ab -k2a5bu -ka2ch4 -k3a4g -kais5 -k2ai2 -ka4l -ka5lim -kal4is -k4a2n -k2a3o -k2ap4 -kar4i -ka2r -1kas. -kau4r4 -k2au2 -k2av4 -k1b4 -k1c -kc2om4 -k1c2o2 -k5d2 -k1do4 -kdol5 -4k2ed -ke5da -k5ede2 -3kee -ke4g -k2e4n4d -ken1o4 -ke2p5t2 -ker5a -ker1 -k4er2e -k5erel -k4er4j -ker5o -kes4i -ket5a -key4wo -key3w -k1f -kfu4r4 -k3fu -k3ho -5k2i1h -ki2l -kilo3 -k1in -k2in. -3k2i4nd -kinema4 -kin5et -k3i4ng -k2in4i -k2i4n1s2 -kir3m -k2ir -ki4r4r4 -kis4 -3kis. 
-k1is2h -ki2t5c2 -k2i4w -kk4 -k5ker1 -k2l2 -k3l2a -k5lea2 -k3ler1 -k3let -k3li -k3lo -k1m -kn2 -k2no -1kn4ow -k2o5a2 -kol4 -ko5m1i -k2om -ko5pe -k1p -k5ro4 -k3ru -4k1s -k3sl4 -ks2mi -k2s1m4 -ks4t2 -k1t -ku4r5 -k5v -k1w -3kyl -l2a -4la. -5la4a -lab5a2r -l1ab -l2aba -la1bel4 -l2ab2e4 -5lab1r4 -l4ac -la2c2a -la5ceo -la5cer1 -la4ch -la2c2o2 -5la5col -lac5on1 -la3cu -la4de -l5a2d1m -l4ae -l4af -la3ger1 -la4g4i4s -la1g2i -la2g3r2 -5l2ah4 -la4ic. -l2ai2 -la1ic -l4al -4l2ale -5laman3dr2 -l2a2m -lama4n5d -la1ma2n -la5mel1li -lam2e -lamell2 -lam4ie2 -lam1i -la1m1o -l5amu -lan3at -la2n -lan2a -la4n2d -3land. -land3i -3lan4d1s2 -lan4er1 -lan3et -lan5tine -lant2i -lant1in -lan4t4r2 -l2a4p -lap1i4 -lar5a2n -la2r -lar5de -4la2r4ed -lar2e -l4as -lat5al -l2a2ta -la4te -5latil1is -la4t1i4l -5latil1iz -5lat2in1i -la2t3in -lat5us -l2a2tu -l4au2 -5lau4r -lav5at -l2av -l4aw -4l2az2 -l3b -lb2e4 -l4bit -l4by -l1c2 -l2c1at -lce4 -l1cen4 -l4c2er2e -lcer1 -lch4e2 -l2ch -l3da2r -l3d2ed -l3de1h2 -l5dera -lder1 -ld3est2 -l5dew -ldi2 -l3die2 -ld4in4e -l2d1in -l5di5ne4s -ld3is2h -ldi4s1 -ld5li -l2dl4 -l3do -4le. -3leagu -lea2 -lea4g -le5a1t2io2 -leav5er1 -le2av -l3eb5ra -le1b -le2br4 -le3c2a -le5cha2 -le2ch -lect5ica -lect2i4c -le2ct -2l2ed -le5dr2 -leg1a -l3ega2n -3le2g1g -le4gin -le1g2i -leg3o -le3gra -le2gr2 -lek4 -4l4e4l2ed -l2ele -lel5o -le1lu5 -lem5enc -le1m2e4 -le1men -lem3is -l5em1iz -5le2m1m2 -l3e4m1n2 -le2mo -l4em5on -l5en2da -l2e4nd -len5da2r -lend4e2 -len4do -l4e1ne -le5n2ie2 -len1i -len3o -4len1t2io2 -lent2i -l4en5u -le3on -leo4s2 -le5q -2ler1 -le5rec -l2er2e -5l4er2i1a2 -l4eric -le5r2ig -ler3om -leros4 -ler3ot -4l4es. -le3s2c2o2 -le2sc2 -3le4s4s -1let -le5tra -letr2 -le5tr2e -5le5tu5 -leu4r5 -2lev -l3eva -5leve -lev5it2a -levi2t -le4wi2 -l5ex1a -1ley -lf5i2d -l2fo -lf3o2n -l1g2 -l4gal -l4gem -lg2i4a2 -l1g2i -l4gi2d -l4g2oi -l3h -4li. -li4an1i -l2i1a2 -lia2n -lias4 -lib1r4 -l1ic. -5l4i2ch -li4cie2 -li1c2i2 -5li5c2io2 -l3ic3on1 -l2i2c2o2 -lict4o -li2ct -l2i4cu -l3id1a -li2d -l4ida2r -5lid3i1f -3l4ieu -lie2 -l4i2fe -l2i1f -l4i3fo -lift5er1 -li2ft -1l2ig -li5ger1 -li1ge -light5i -li2gh -ligh2t -5l2i1h -3l2ik2 -1l4il -lil4i -li2m2b -limet4e -lim2e -li1me2t -lim4p -l4i2na. -lin1a -l4in2as -l2i4n4d -l4ine -5l4in3ea2 -lin4er. -lin1er1 -lin4e2r1s2 -lin4ger1 -l4inge -li4ng -l4in2g3i -5lingt -3l4in1gu -3l4in1q -lint5i -3li2o1g -l2io2 -l4i4ol -li2o3m -li3ot4 -li3o4u2 -5liph -l2ip -li2pt5 -l2i1q -3l2ir -l1is -l4is2k2 -5lisse -l2i4s1s -l1it -l2it. -l3it5a -5liter1 -3l2ith -5l2i1t2i2a2 -3l2itr2 -lit4u -l4iv -l5iv1at -l2iva -liv3er1 -liv5i2d -lkal5o -lka4l -lk5at -lk3er. -lker1 -lk3e2r1s2 -ll2 -l1l2a -ll2a4ba -ll1ab -lla2ct4 -ll4ac -l5l4as -l4l4aw -l5le1b -l1lec -l1leg -l3lei -l1lel -lle5m -l1len -l3lep -l3leu -l3lev -ll3f -l1li -lli5a2m -ll2i1a2 -lli4a2n -llib4e -llic4 -l4licl2 -ll2i5c2o2 -l5lie2 -llig1at4 -l1l2ig -l2lin -l5lin. -l3lin1a -l3l4ine -l5l2io2 -ll4i5v -ll3m2 -l1lo -lloc3a -l1l2oc -lloc5u -l1lo2q -l4lov -llow5er1 -ll3p -l4l3s -ll5t -l1lu -llun4 -l5ly1a2 -l2ly -l3ly1c -l3ly1g -l3ly1h -l3ly1i -l5lym2 -lm2 -l1ma -l1m2e -l4mer1 -lm3i4ng -l5m2i3p -l2m3od1 -l1mo -l1n4 -l3ne -lneo4 -2lo. -5load -l2oa2 -5l4ob3a -1l2oc -loc3al -loc2a -loc5ul -lo4cus. -lo1cus -2lo1cy -l3od1i4s2 -3lo3dr2 -1l2o1g -lo5ga2n -lo2ga -4loi. -l2oi -lo5m1i -l2om -lo2m4m2 -lon4al -lon1a -lo2n4e -l5onel -lo5ney -long5in -lo4ng -lon2g2i -3l4o1n2i1a2 -lon1i -lon2i4e2 -l3onis -l3on1iz -loom5er1 -l2oo -lo2o4m -loom2e -lop4e -5lo5pen -l3o2p1m -1lo1q -l4o2r4ed -l2or2e -lor5i3at -lor1i -lor2i1a2 -lor4i2fe -lor3i1f -lo5rof -loros4 -l4os. 
-lo1so -lo4ss4 -los5sie2 -lot5at -l2ot -loth4ie2 -l4oth -lo5tu -5lo2up -lo4u2 -lp1at -lp3er1 -lph2 -l5phe4 -l3ph1i4n -l2pho -l3p2ie2 -l3pit -lr4 -l3ri -l3ro -l5ru -4ls -l5s2a4m -ls5a2n -lsi4f2i1a2 -ls2i1f4 -lsi4m -ls4is2 -l5s2k2 -ls4p -l1s2t2 -lt4an3e -lta2n -l4ta4ng -lt5ant -l5ta2r -l1te -l4tei4 -lter2n3 -lter1 -lth3i -lt2i4c2i2 -lt1ic -ltim4a -l1tim -lt1in4 -lti3t -l3t4iv -lt4or -l1tr2 -ltram2ont5 -ltr2a2m -ltra3m4on -ltra1mo -l1tu -l4tus -4lu. -l2u1a2 -lu4ch4 -l4uc -lu2c5o2 -luc5ra -lucr2 -lu4cu -4lue -lu1en -lu5er1 -l2u1i2 -lu4it -lum4bri -lu2m1b -lumbr4 -lu4mo -5lum2p -lu2m5u -lunch5eo -lun1 -lun2ch -lunche2 -5lune -l3unta -lu3or1i -5l2up -3lur3o -lu4r -lus2k5 -lu4ss4 -lut5a2n -4lut5a2r -5lution1iz -lut2i -lu3t2io2 -lution1i -lu5t2oc -lut2o -lut5r2 -lu1v2 -lv5ate -l5vet4 -l4vi -l4vor -l1vo -l3w -lx4 -2ly -4ly. -ly1c -ly4ca -lyc4l2 -ly2c5os -ly1c2o2 -lym2 -lymph5 -lym1p -l2yp2 -ly4pa -lypt5o -ly2pt2 -3lyr -lys5er1 -l4y2s -3ly3w -3lyz -lz4 -4ma. -m4ac2a -mac3ad -ma5ch2in2e -ma2ch -mach1in -5mach2y -ma4cis -m2a1c2i2 -ma2ct4 -4mad. -4mada -4ma4d1s2 -ma4ge -5m4a2g1n2 -2mago4 -2m2ah -ma5ho -3m2a4i2 -4m4ai. -mai2d3 -5m2ak -mal3a4p -mal2a -mal5ar1i -mala2r -5m2ale2 -ma2l5ed -mal3e1f -m3al1g2 -m3al1is -m4al4is. -mal3le -mall2 -mal4li -2m2a2m -mament4 -ma3men -mam2e -m5ament. -1ma2n -3m4an. -man3a -man5da2r -ma4nd -man2da -man3dr2 -man3ic4 -man1i -man4ica -ma5n2il -m4a4n2s -man3te2l5 -man2te -2m2ap -m3aph -1ma2r -5mara2n -mar5ol -ma5ro4n -ma3r2oo4 -mar5ri -m2a4r1r4 -mar4shi -ma2r2s2 -mars2h -mar3v2 -ma3son -m2aso -massi4 -ma4ss2 -mass5i4ng -mas2s1in -3mas1t2 -ma4s4t4ed -maste2 -mast4ic -mas4t1in -m4at. -m4aten -m4a3ter1 -mater5n4 -m4at1it -mat4iti -m4atiz1a1 -mat1iz -m4a3t2o1g -ma2to -mat5om -ma3top -m4a4t1s -3m4a2t1t4 -ma5tur2e -m2a2tu -matu4r -m2av4 -2m1b -mba2t4t4 -m2b4d -m5bec -mb2e -m5ber3er1 -m2b2er2e -mber1 -m4be2r2y -m4be1s2 -mb2i -m2bic -m5b1il5 -m4b3i4ng -m4b2is -mb5ist2 -mbival5 -mb2iva -m5b2ler1 -m2bl2 -m3bli -mbru4 -mbr4 -mbu3l -mbu4r4 -m1c -m5d -m2e -2me. -mea5g -mea2 -me5a4nd -mea2n -me4ba -me1b -me4b2i -2m2ed -4med. -3me1d2i1a2 -med5ic1at -4medie2 -m5ed5ies -3med1it -me4do -m5e2d2y -me2g -5meg2a1 -meg1a5t -4m2ele -mel5ee -mel5ler1 -mell2 -mel3on -mel4t -melt5er1 -mel1te -me2m -4m5e1m2e4 -1men -3men. -2men1a -men4ag -mend5er1 -m2e4nd -mende2 -men1d5o -m4e1ne -ment5or -men2t2o -5men4t1s -5me2o1g -me4p -m5era2n -mer1 -4m2er2e -mer4i1a2 -2me2s -mes5en -me5s2i4a2 -mes5q -3me2sti4 -mest2 -1me2t -meta3t -met2a -met1e -4met4ed -meth4i -me2th1 -met1i4c -met5i1c2i2 -met3o -met3ri -metr2 -m1f -4m3h -4mi. -m1ic -m4i4cin -mi1c2i2 -m2i3c2o2 -3micro -m2i1cr2 -m4i2ct -mi3cul -m2icu -mi4cus -m4idi -mi2d -mi2d4in -mid5on -mi1do -mi5fi -m2i1f -mig5a -m2ig -migh5ti -mi2gh -migh2t -mi2gr2 -4mij -mi5ka -m2ik2 -m2il -m3i2l3a -mil4ad -4m5ilie2 -mil5ies -3mill2 -mi5lo -mil4t -3m2im -mim5i -5m2i4n4d -mind5er1 -minde2 -min4er. -min1er1 -min4e2r1s2 -m4ing5li -mi4ng -min2gl2 -min5ie2 -m2in1i -m4init -min3ol -m2ino -1m4int -minth5o -minth2 -m2i3o2 -m2i3p -mir1ab4 -m2ir -mir1a -mi5r2acu -mira4c -m2is. -m4i2s3c2 -mi4se -4misem -mis3ha2 -mis2h -5missi -m2i4s1s -m3i4st. -mist2 -mis4t1in -m3i2s4t3s -mi2t -m5ita2n -mit1a -4mity -3m2i1u2 -5m2ix -4m1l -mlo5cut2i -m1l2oc -mlun4 -2m1m2 -mma4n4d -m1ma2n -mmand5er1 -mmande2 -m3medi -mm2e -m2m2ed -mmel5li -mmell2 -mmet4e -m1me2t -mm2ig3 -mm2in3u -mmis3 -mmob3 -m1mo -m5m2oc -mmor3 -mmut3a -4m1n2 -mn2i1f4 -mn1i -m4nin -mn2i5o2 -mnis4 -mno5l -1mo -4mo. 
-2m2oc -mod1 -mod5ifie2 -mod2i1f -mogast4 -m2o1g -mo2ga -mo3ga4s -mo4go -mo2g5ri -mo1gr2 -m5o2ir -m2oi -mok4i -mol3a -4molog. -mo1lo -mo1l2o1g -4molo2g1s -4molo2gu -mo3ly -m2o1m -mo4mis -mom1i -m4on -mon1a4 -4m4o2n2ed -mo2ne -mo4n1g -mo4no -mono1lo4 -monolo3g5i -mono1l2o1g -m4op -mophil5i -mo5phi4l -m1o2p4t2 -m3or1ab -mor1a -m3orat4 -mor4a2to -m5ord -mo5rel -m2or2e -3mor2i1a2 -mor1i -m5or1iz -mor5on -3morp -3mor2se -mo2r1s2 -mor5tal -m2ort2 -m4o3sp -5most2 -m4o3sta -2m1ous -mo4u2 -m1p -m3pa -m4panc -mpa2n -m4pant -mp4a2th3 -mpel5li -mpell2 -m5per3er1 -m4p2er2e -mper1 -mp4er3i -mpet5it -m3pet -mpe2ti -mphal5o -mpha2 -m4phe4 -m4p4h2l -m2pi -mp5i2d -m5p2ig -mp3i2ly -mp2il -mp1in -m3p2ir -mp3is -m3po -mpol5it -mp2o2t -mpov5 -mp3to -m2pt2 -mp5tr2 -m3pu -m5q -m3r -m4r2y -4m1s -m5sel5f -m5si -ms2ol4 -mtu4 -muc4k4e -m4uc -muck1 -mu2ff4 -muf2 -mul1t2 -m5unc -mun1 -mu5n2io2 -mun2i -mun3is -mus5c2o2 -m2usc2 -mu4se -mus5ke2 -mus2k2 -mu3til -mut2i -m1v -m3w -2my -5my3c -my4d4 -my3e -3my1i -5mys1t4 -m4y2s -3myt -n1a -2na. -na2c -na2ch4 -na5cious. -n2a1c2i2 -nac2io2 -nacio4u2 -na5ciou2sl4 -nac4te -na2ct -nac5t2iva -nac2t1iv -na5cular1i -nacu1l2a -nacula2r -na4d4a -nadi4 -nad4op -na2do -n2ae. -naf1fi2l4 -na2ff -naf1fi -nag4a -n4a3gen -5n4a5geri -nager1 -na4g2i -n5ago -5n4a3gr2 -5n2ah -5nail -n2ai2 -na5iv -n2ak2 -4na2l2ed -n2ale -n5al1g2 -n4al2i1a2 -n2a3ly4 -1n2a2m -3nam2e -na4m4n2 -na5n2as -na2n -n1an2a -nann2ot4 -nan5no -nan1n4 -nan4t2a -nan5t4ed -nan2te -nan4t2o -n2a5o -4n4ard -na2r -nar5tisti -nart2 -nar2t1is -nartis2t2 -n2as -nas5i -nas5p -na4s3s2 -nas5te2 -nast2 -nat5al -n2a2ta -na5ta3t4 -n4atee -na3the2 -n4a2th -nat4h4l -nati4 -n4a4ti. -nat5i2c -n4a2to. -na2to -na3t2om -na4tos4 -n2a2t4r2 -na5tur1i -n2a2tu -natu4r -naugh5ti -n2au2 -nau2gh -naugh2t -naus3 -3naut -naut3i -n2a2v -na5vel -n3b4 -nbarric5 -nba2r -nb2a4r1r4 -nbar3ri -nbe2au4 -nb2e -n3bea2 -nbe4n -nb4e1ne4 -nbet4 -nbit4 -n1c2a -n4cal. -nc1al -ncarn5at -n1ca2r -ncar2n2 -ncarn1a -ncel4i -ncen4t5ri -n1cen2 -n3cent -ncen1tr2 -n4cept. -n1cep -nce2pt2 -n3cer1 -nc2er4e -n4c4es. -n5ce2t -n5cey -n3cha2 -n2ch -nch4ie2 -n3cho -nch5o1lo -n3chu -n4cic -n1c2i2 -ncid5en -nci2d -n4c2i3f -ncip5ie2 -nc2ip -n1c2l2 -n4cles -nc2le2 -n3c2oc4 -n1c2o2 -nco5pat -nco1pa -n1cr2 -n2c1t -nc4t1in -nct4ivi -nc2t1iv -nct2o -n1cu -ncu4lo -n4cun1 -n4curvi -n1cu4r -ncur1v2 -nc4us4t2 -n1cus -4nd -n2da -n3d2a4c -n3dal -n4d4ale -n3d2a2m -nd3anc -nda2n -nde2 -n3dea2 -nde3c2i2 -n3dec -n1d2ed -nde4l -ndeleg4 -nd2ele -nd3enc -ndepr2e4 -nde1p -nde1pr2 -n3derl -nder1 -nde4s -ndes5cr2 -n5de2sc2 -n5dez -nd4hi -n4d1h -n1dic -nd2ic5u -ndid5a -n1di2d -n3die2 -nd5i2ly -ndil4 -nd4in4e -n2d1in -nd3ise -ndi4s1 -nd5is4i -nd5ism. -ndi2s1m4 -n2d5ity -nd3ler1 -n2dl4 -nd1li -n5d2oc -n1do -nd2or4 -n2do4u2 -nd5ou4r -ndrag5 -ndr2 -n3dr2a2m4 -n5dron -ndu4b -n1du -nduc2t5iv -n3d4uc -ndu2ct -n4dun1 -nd2we -n2d1w -n3dy1i -n2d2y -2ne. -ne3alo -nea2 -n3ea2r -ne2b3u -ne1b -5neck1 -ne4cl2 -ne2c2o2 -n5ec1t2om -ne2ct -2n2ed -3nedi -ne4du4 -neg3a -ne3go -5negu -n4eis4 -2n2ele -ne5l2i1a2 -ne1l2i4g -n4e2ly -ne2mo -4n1en -n3e4nd -neo3l -neon4 -ne2p -n1er1 -4n4e2r4ed -n2er2e -5neri4ng -ner3in -ner5o -ne4r4r5 -ner2v2 -ner2v5in -2n4es. -n1e2sc2 -ne3s2i1a2 -1ne4ss -n1est2 -nes3t4r2 -net3a -net3ic -ne4t2o1g -net1r2 -neuma5to -n4eum -neu1ma -neut5r2 -nev5er1 -n4ew -news3 -n4eys. 
-ne4y3s2 -n3f -n1fo4 -nform5er1 -n5f2orm -nform2e -nfor1tu5 -nf2ort2 -nfra2n3 -n1fr2 -4ng -ng2a -n4gae -n5gee -n3geri -nger1 -n5gero -n2gh4 -n2g2i -n5gic -ngi2o4g -n3g2io2 -n5glem -n2gl2 -n3glie2 -n5gl2io2 -n2g1n2 -n1go -n4g2r2y -n1gr2 -n1gu -n2gum -n1h2 -nh1ab3 -nha2 -nho4 -nh2y2 -nhy1d5 -n1i -4ni. -3niac -n2i1a2 -ni3ba -n4ic1ab -ni4cen2 -n4ice -4nicl2 -nic2t5a -ni2ct -ni4cul4 -n2icu -ni4d2i1o2 -ni2d -n2ie2 -ni4e2r1s2 -ni1er1 -ni2f4f -n2i1f -ni2ft4 -nif5ti -n2i2g -night5i -ni2gh -nigh2t -n3i2g1m4 -3ni2gn2 -nik5e -n2ik2 -n2il -ni2l4a -n3im1 -n4im2e -5ni1me2t -n4ine4s -nin4j -5n2in1n4 -n4in2u -5n4i1ol -n2io2 -ni1o4u2 -3nipu -n2ip -5n2iq -n4is. -n4is2k2 -ni2s4l4 -nis4o -n5i4s1s -nis5ter. -nist2 -niste2 -nister1 -nis5te2r1s2 -nit2ch4 -ni2tc2 -ni4te -ni3tho -n2ith -n4itos -ni5tra -n2itr2 -nit5res -nitr2e -ni3tri -nit4u4r -n2iv -n2iv4a -ni3vo -niv2oc4 -niz5en -n1iz -n1j -nj2a2m2 -n1ja -njur5i -nju4r -4n2k -nk5a2r -n5ker5o -nker1 -n3key -nk5i1f -nk5i2l -4n1l2 -nla4n2d5 -nl2a -nla2n -n3le -n1let4 -n3m -nm4a3ter4 -nmor5ti -n1mo -nm2ort2 -n1n4 -nne4 -nnel5li -nnell2 -nnerv5a -nn1er1 -nner2v2 -n3n1i -nni3ki -nn2ik2 -nnov3 -n5ny1i -n2ny2 -4no. -n5ob2i -no5b1il -no2b4l2 -no5blem -nobser4 -no4b1s2 -n5ocu1l2a -n2oc -no4di -n4o2d2y -noe4c -no4f2a -nois5i -n2oi -n4ois -n5ol. -no3l2a -nol4i -no2m3al -n2om -no1ma -1nom1i -no2mo -4no2ne -3n2oni4c -non1i -5nood -n2oo -nop5i -nora4t -nor1a -nor5di -nor4i1a2 -nor1i -nor4is -nor3ma -n2orm -n4oro -n2or4t2 -n4os. -nos4o -n4o3sp -not1a -n2ot -3note -n1o4u2 -n4oug4 -3n2oun1 -2nous -nou5v2 -nova4l -nove2 -no2v3el -no4v2el5e -n4ow -now5er1 -now3l -n3p4 -np2il4 -n1pl2a4 -np2l2 -npoin4 -n1po -np2oi -npo5l2a -n3p4os4 -npri4 -n1pr2 -n1q -n4que1f -nqu4 -n1r -nre4i4 -nr2e -nre3m -nres5t4r2 -nrest2 -4n1s -ns2c2 -n2s2c2o2 -ns3c2ot -n4s1cu -n5sec -nsec4te -nse2ct -n2s2es -n5seu -n3s2h2 -n2si -n4s3ib -n4sic -n5s2ick1 -n3s4i2d -n3sie2 -ns5ifi -ns2i1f4 -ns3i4ng -n2sin -n3s2io4 -n3s2is2 -ns1i2t -n4s3iv -nsolu4 -ns2ol -n5son -n4s2or2e -n4s3o2r2y -n3sp2ir -n3s2t2 -nsta4 -nstil4 -n3su -nsur4e -nsu4r -n3swa -nsw2 -ntab4u -n2t1ab -nt3age -nt1al -n4t3anc -nta2n -nt5a4nd -ntan5eo -nt4ane -n4t3ant -nt4ar2i1u2 -nta2r -ntar1i -n5tas1is2 -ntas3i -nt3as4t2 -nt1at -nt5a2th -nt3ati -nt5a2t1iv -n5t2au2 -n1te -n4tec -n4tee. -n4tees -n3te2l -ntend5en -nt2e4nd -ntende2 -n4te2o -n4ter. -nter1 -n3teri -n5ter2n2 -ntern5al -ntern1a -nter5nat -nth2 -n1the2 -nt4her5 -nth5in2e -nth1in -nt2i -n2t4ib -n4t3ic. -nt1ic -n5ticis -nt2i1c2i2 -n5tic1iz -n4ti4c5s4 -nt2ic4u4 -n3ti2d4 -n1tie2 -n4tify. -n1t2i1f2 -nti3fy -n3t2ig -nt5ilati -nti2l3a -n5t4ill2 -nt3i4ng -nt1in -nt5ing. -n1t2i3p -n4ti1pa2r -n4tis. 
-n2t1is -nt3i2s1m4 -nt3is2t2 -n5ti1t2io2 -nt3iz -n2tj -n1t2o -n3t2om -nton1i4 -n5top -n1tr2 -ntr2a3d -nt3ral -n4tran2t -ntra2n -n3trat -nt5re4s4s -ntr2e -n2t3ril -ntrol5ler1 -ntrolle2 -ntroll2 -n5trym -n2t4r2y -n1tu -n3t2u1a2 -ntub5 -ntup5li -nt2up -ntup2l2 -n5tu4r -n2ty -n2u -n2u1a2 -5n4uc -3nud -nud5i -nu3en -nug4a -n2u3i2 -nu4is -5nuk -n4u1lo -n3ult -nul1tim5 -nu1m2e -5numenta -numen4t -nu1men -5numer1 -5nu1mi -3nunc -nun1 -nu3tat -n5u2t1iv -nut2i -nu4t2o -nu1tr2 -n3v2 -nve2 -n2vel3 -nv4e1n4e -nven5o -nver2s5a2n -nver1 -nve2r1s2 -nvi4t -nvoc5at -n1vo -nv2oc -nvoc2a -n5w -nwin4 -nwi2 -n3w2om4 -n1wo -n2x4 -2ny2 -5nyc -nym5i2t -nyth4 -n1z2 -nzy4 -2oa2 -o5ace -o3a2ct -oad5er1 -oad5i -o3ag -oak5er1 -o2ak -o3ales -o2ale -oal4i -oal5in -o5al1it -oan4t -oa2n -oap5i -o2ap -oar5er1 -oa2r -oar2e -oar4se -oa2r2s2 -oast5er1 -oast2 -oaste2 -o2a2t5a -oat5ee -o4at5er1 -4oba -o1be4l -ob2e -ob2i -ob3i4ng -2o1b2i3o2 -ob3it -o3bl2a -o2bl2 -ob1li -4obo2 -ob3oc -o5bol -o5b2ot -o3bra -obr4 -obr2om4 -o2b5t -ob3ul -o3bus -2oc -oc2a -o4c1ab -o3cad -oc5ag -o5cal1li -oc1al -ocall2 -o4c5a2t1iv -oc1at -oc5a2to -4o3ce2 -o4cea2 -ocen5o -o1cen2 -ocess4i -o5ce4ss -och4e2 -o2ch -och5in -o3ch2o4n -o1cho -ochro4n -o3c2hr -o5chu -oci3ab -o1c2i2 -oc2i1a2 -oci4al -o1cl2 -o2c2le2 -o1cr2 -oc2r2e3 -o2ct2 -oc2te -oc1to -ocu4lu -ocum4 -oc5uo -ocu4ss4 -o1cus -ocus5si -oc3ut5r2 -o1cy -o5cy4t -ocyt5o -od3al. -o3de4c -o5de3g -ode4ga -o5d2e4n4d -o3dent -odes4 -od3ica -o4d1ie2 -od3iga -od2ig -od4il4 -od1i4s2 -o3d2is5i1a2 -od5it -5od1iz -od3li -o2dl4 -o2do -od5o1lo -o2d5ous -odo4u2 -o3dro -odr2 -od5ru -o2du -odu5cer1 -o3d4uc -o4duct. -odu2ct -o4duc4t1s -od3ul -o5dyt -o2d2y -oe3a2 -oe4b2i -oe1b -oe5cu -o2e4d -o5ee -oe5ic -o3elec -o2ele -oel1li4 -oell2 -oelo4 -oe3o4p -oep5 -o5eq -o3er1 -oes3t2 -o1et -o4et. -oet3i -oet4r2 -3oeu -o3ev -o3ex -o3flu4 -ofl2 -4o1fo -o4ful -o3fu -ofun4 -2o1g -o2ga -o3g2a2m -og5a2r5 -o3ga4s -o3gen1 -o5gey -o3g2i -o4g2io2 -og2n1a -o2gn2 -ogon1i4 -o4g2ot -o2gri -o1gr2 -o4g4ro -og4s2h -o2g1s -o2gu -o5gyr -o2gy -o1h2 -o3ha2 -oh1ab3 -o3he2 -oher4er1 -oher1 -oh2er2e -o3ho4 -oh2y4 -2oi -oi4c -o3ic. -o4i5ch -o2i4d -4oide -o2ig4 -oi5ki5 -o2ik2 -oi2l3er1 -oil5i -oin3de2 -o2i4nd -o3i4ng -oin4t5er1 -oin1te -oin4tr2 -o2i4o2 -4ois -o3i2s1m4 -oi4t -oit4al -oit1a -o2ith4 -o1j -ok4ine -ok1in -ok3l2 -ok5u -ol4a4c -ol2a -o4lack1 -o5lali -ol4al -ol4a2n -ola4n5d -ol5ast2 -ol4as -ol4at5er1 -ola4te -ol5ch -ol1c2 -ole2c4 -ol5e1c2i2 -ol5efi -ole1f -o3leo -ole4on -o3lep -o2l1er1 -o3lest2 -o3leu -o1l2i1a2 -ol3ica -o3l4ice -ol5ic1iz -oli1c2i2 -ol5ick1 -ol3i4c5s4 -ol5id. -oli2d -oli2e2 -o3li1er1 -ol5i4es. -o5l2i1f -ol4i4f3e -oli5go -o1l2ig -o5lin1a -ol3i4ng -oli5os -ol2io2 -ol5ip4 -ol1is4 -ol2it -olle2 -oll2 -ollim3 -ol1li -ol4lop4e -ol1lo -ol4ly1i -ol2ly -ol3mi -olm2 -o1lo -4o1l2oc -ol3o2i4d -ol2oi -o4lon1a -ol5on5el -olo2n4e -ol1or -o3los -ol1o4u2 -4ol1ub -o3lu1mi -o5lun1te -olun1 -ol3us. -oly3ph -o2ly -ol2yp2 -4olyt -2om -o1ma -o4m1ab -o2mac -o2mal -o4mane -o1ma2n -o3mas1t4 -o3mat -om4b2e -o2m1b -o2m2e4d -om2e -ome4g -o5meg5a1 -ome3li -o2m3en1a -o1men -omen4t -o3meri -omer1 -om1i -o3m2i1a2 -om1i2c -om2i1c5r2 -om4ie. -omie2 -om2il4 -om4i2ny2 -om2i4s1s4 -om2i2t -omm2e4 -o2m1m2 -om2n1a -o4m1n2 -omn1i3 -o4m2oi -o1mo -omoli3 -o2m4o4n -om5o2ny2 -o4mos. -omo2t5iv -om2ot -o2mo4u2 -om5p2il -om1p -om2pi -ompt5er1 -om2pt2 -ona4d -on1a -on3ai2 -o5nas. 
-on2as -onast5i -onast2 -on5a2t1iv -onati4 -4on2au2 -on1c -onc1at3 -on1c2a -on4cho -on2ch -5ond5a2r -o4nd -on2da -ond5ent -onde2 -on3der1 -on3dr2 -on5d2y -o2ne -4onea2 -onec4r2 -4o2n2ed -on1ee -on5ell2 -o3neo -o1n3e4ss -on1et -ong3at -o4ng -ong2a -on4gu -4on1h2 -4o1n2i1a2 -on1i -on5ia2r -2oni4c -onic5a -oni4c1al4 -on4i2d -on3ies -on2ie2 -on3i1f -o5n2i2g -o1n2io2 -on4k4s -o4n2k -4onnes -on1n4 -onne4 -on5o4di -on5oi -o1no4m1i -on2om -4o5nom1i2c -ono3s -o5not1a -on2ot -o4n1s2 -2ont -ont5a2ne. -ont4ane -onta2n -on4ter1 -on1te -onti5fi -ont2i -on1t2i1f2 -onton5 -on1t2o -on1t4r2 -on4tr2e -on5u4r -on2u -o5nus -onvo5lu -on3v2 -on1vo -onvo2l -on2z2 -2oo -oof3er1 -o2o1i -ook3er1 -ook3i -oo4le -ool5i2e2 -o2o4m -oon3i -oo2p -oop4ie2 -oop1i -o3o2p1t2 -oo4se -oost5er1 -oost2 -ooste2 -o2o2t -oot3er1 -ooz5er1 -o2oz -o1pa -o4p1ab -o5pali -opa5ra -o1pa2r -op4a2th5 -o5pec -ope4n2s4 -op1er1 -3opera -4opera4g -o1pha2 -o4phe4 -oph4ie2 -o5phi4l -op5hol -o1pho -o1ph2y -ophy5l2a -o3phy2l -op1i -op3ies -op2ie2 -op5i4ng -o3p2it -4op2l2 -oplast4 -o1pl2a -opl4as -o4p2oi -o1po -opol3i -op4o2n4 -o2p5o2ny2 -op5or1i -op2oun4 -opo4u2 -o2p5ov -op2p2l2 -o4p1p2 -op5pli -oprac4 -o1pr2 -op3ra2n -opr2e4 -opro4l -op5r2op -op5so -o2ps2 -1o2p1t2 -op2ta -op1u -o5qui3al -oqu4 -oq2ui2 -oqu2i1a2 -or1a -or5a2do -or2ad -ora4g -o5r2ai2 -or5al -4ora4ls -or2a2m4 -oran3e -ora2n -orat1or5 -ora2to -or2b3in -or1b -orb2i -or4ch -or1c2 -orch3i -or4du -2or2e -or5ead -orea4 -ore5a2r -ore5c2a -ore3f -ore3g -or3ei4 -oreo5l -or3e2sc2 -ore3s2h -or3e4ss -orest5at -orest2 -or5este2 -or5e2t2t4 -ore4v -5orex -or4fr2 -or1f -or5gn2 -or1g2 -or1i -4o4ri. -or3ia. -or2i1a2 -4oria2n -ori4c2i2 -ori5ci2d -or2i1en4 -orie2 -or3i1f -5or2ig -ori5ga -or4i4no -4orio. -or2i3o2 -or5ion -4orios -ork5a -or2k -2orm -orm1i -or3n4a -or2n2 -5or1nis -orn1i -or3nit -or3o2ne -o5r2oo4 -or5ose -or5oso -or1o4u2 -orre2l3 -o4r1r4 -orr2e -orres3 -or4sc2 -o2r1s2 -or4sey -or2se -or4sti -orst2 -2ort2 -ort3a2n -ort3at -ort3er1 -or5t4es. -or2t2es -or3th2i -or4t2h2r -or4tit -or2t3iz -or4t1or -or1t4o -or5tra -ortr2 -ort3r2e -4or1u -or4un1 -or2y5p -o2r2y -o3s2a5i2 -os3a2l -osa2r5 -o1sc2 -os4ca -os4ce -o2s2ch2 -o4s1c2i2 -osclero5s4 -oscl2 -osc2le2 -osc2ler1 -o3sec -osec3u -ose5g -os5enc -o3se4n2s4 -os5eo -oser4 -o2set -os5eu -o3s2i1a2 -osi4al -osi4a2n -os5i4de -os4i2d -o3si1er1 -osie2 -os5i1f4 -o2s1in -o4s1is2 -o5ske2 -os2k2 -o5son -o3soph -os2o2p -os3o1po -4osp -o3spec -os1pi -os4sa -o4ss -oss5a2r -os4s2it -4osta -ost2 -ost5age -os4ta2r -os5tee -oste2 -os5ten -osten5t -ost5ica -ost1ic -os3til -o5stom2e -ost2om -ost3or -4osu -os1u4r -2ot -ot3a4g -o5tal1it -ot3a2m -ot4an3ic -ota2n -otan1i -o3t2ap -ot4a1t2io2 -o5t2a5v -o3t4a4x -o4t4ed -oter4m -oter1 -ot5est3a -o2t2es -otes4t2 -4oth -othal2a2m5 -otha4 -othal2a -oth5er3in -ot4her1 -othe2 -o5therm -otherm5a -o5thor -o5t2i2a2 -o5t4ill2 -5ot5in1iz -ot1in -ot2in1i -o2t4iv -o3t2iva -o5tivi -o1t2o -o5to2n4e -o4tor2n2 -ot1or -o4to4u2 -4o1tr2 -otur1i4 -otu4r -oty3le2 -o4u2 -5o2u3a2 -oub2 -ou5br4 -ou5c2a -o4uc -ou5c2o2 -oud5i -4oue -ou3e4t -oug4 -ou5ga -ought5i -ou2gh -ough2t -ou5g2i -oul4t -oult5i -ou3m -2oun1 -ou4n2d -oun2d5a -ound5e4l -ounde2 -oun5gin -ou4ng -oun2g2i -oun3tr2 -oup5li -o2up -oup2l2 -our3er1 -ou4r -our2e -ou5sa2n -2ouse -5ous2i1a2 -ou4ss4 -out5is2h -out2i -ou2t1is -ou4v5a -ouv2 -o1v2a3le -o5va2r -4ovati -ov5el3i4ng -o2vel -o4ver. 
-over1 -o5ver3b -ove2r3s2 -ov4ete -ovid5en -ovi2d -o1vis -ovis5o -o2v5os -o1vo -ow3ag -ow3a2n -o5w4ay -owd4i -ow2d3l4 -ow1el -owel5li -owell2 -ow5ha2 -owh2 -owh2ith4 -ow1i2 -ow5in -owi5n2e -ows4 -ow5s2h -ow5sl4 -ow5y2 -o4x -ox3i -oxic5ol -oxi4c -ox2i2c2o2 -ox5o -2oy -oy5a2 -o4y2s4 -2oz -o1zo -oz2o5i -o3zy1g -4pa. -pac4te -pa2ct -pa5do4u2 -pa2do -pad4r2 -pae4s4 -pa3ga2n -4pag1at -pag4ati -pain2 -p2ai2 -4pairm -pa2ir -pa5la2n -pal2a -pal3in -pa3lo -p4a4ls -pan5a2c -pa2n -pan2a -pan1e -pan3i -p4a4pa -p2ap -pa3pe -pap3u -pa3p4y -1pa2r -para5s2 -par3l -pa3r2oc -pa3rol -par5o4n -1p4as -pass5ive -pa4ss2 -pas4s1iv -pas1t2 -pas4t1in -p4a3ter1 -pa2t3i4n -p5a2to -pat4ric -p2a2tr2 -pat1ri -pa5tric2i1a2 -patri1c2i2 -5p2au2 -paul5e -pa2u3p -pa5vi4l -p2av -5paw -pawk4 -paw5ki -2p1b -p1c4 -p5d2 -2pe. -pear4l5i -pea2 -pea2r -pe4c2o2 -pec4tu -pe2ct -2p2ed -5ped3a -3pede2 -3pedi -ped3i4s1 -3pe4d1s2 -pe2du -p4ee -pe2f -4p2ele -pe5le3o -pel5v4 -pen4at -pen1a -5p4enc -pend5er1 -p2e4nd -pende2 -pen5dr2 -pen4ic -pen1i -3p4en1n4 -pens5ati -pe4n2s -pens2a1t -p4en5u -pe5on -5p2er1c2 -per1 -per3cent5 -per1cen2 -4p2er2e -perem5i -p4eri -5p4er3n2 -p3eron -per4os. -per5t1in -pert2 -per2t5is -per3v2 -p4e2r2y -2pes -pe4s4s3 -pes5til -pe2sti -pest2 -3pet -pet5all2 -pet2a -pet3en -pe2ti -pet3r2 -pe4wa -4pex -p1f -p5g -2ph. -4phae -pha2 -pha5g2e4d5 -ph5al. -ph2a2n -phant5i -phan4t -phe4 -ph5esi -ph3et1 -3phib -4ph1ic -1phi4l -ph1i4n -ph1is -ph2i5th -p4h2l -1pho -4pho2bl2 -4ph4o2n2ed -ph2o4n -pho2ne -3phor -ph5or1iz -phor1i -ph4os3p -ph3o4u2 -3phra -p2hr -4p2h1s -1phu -ph2u5i2 -2phy. -ph2y -3phy2l -4pi. -3pia2r -p2i1a2 -4pica -p5i4c1al -p2i3c2o2 -p2i4cr2 -pi2ct4 -p2ie2 -p4i1es2t2 -pi5eti -p2i1et -p5ifie2 -p2i1f -pi2g3n2 -p2ig -p2il -3pile -pil2l5in -pill2 -pil1li -5pilo -pi3l2ot -pim2 -pin4e -pin5et -3p4inge -pi4ng -p4in1n4 -5p4i4n1s2 -3p2i1o2 -pip4a -p2ip -pi4pe -5p2iq -pir5a4c -p2ir -pir1a -pir4t2 -p4is. -p4i2s3c2 -p2i4s2s -pis1s5a -pis5til -pist2 -pis4tr2 -p2itu -2p3k2 -p2l2 -1pl2a -pla5n1o -pla2n -plant5er1 -plan2te -plas5t2i1c2i2 -pl4as -plast2 -plast1ic -pla5t4o -4p4le. -4pled. -p2l2ed -3pleg -3plen -2ples -4pli2s1m4 -pl1is -4plist2 -plu2m -plum4b2e -plu2m1b -plumb5er1 -p4ly -2p1m -2pn -pnos4 -1po -4po. -po3c2a -p2oc -3pod -4pof -2p5o2i4d -p2oi -pois5i -p4ois -po5lem1ic -po4ly1 -pol4y3s -po1ma2n5 -p2om -po1ma -pom4e -p4o2n -pon4a2c -pon1a -pon4ce -pon1c -pon4i4e2 -pon1i -3pon3i1f -pon5ta -p2ont -2po2ny2 -po4pa -po5ple -p4op2l2 -4pora2to -por1a -por3ea4 -p2or2e -4po2r4ed -por3i4f -por1i -por3p -3p2ort2 -por5tie2 -3p4os -po4s1s2 -po1te -p2ot -poul1t5e -po4u2 -poul4t -pound5er1 -p2oun1 -pou4n2d -pounde2 -pout5er1 -p5ox3i -po4x -5p2oy -4p1p2 -p1pa2r3 -ppar1at5 -p4p4ene -p3pet3 -pph4 -ppi4c -p4p2l2ed -pp2l2 -p5p2ler1 -p5p1let -ppres2s5o -p1pr2 -ppr2e -ppre4ss -ppr4ob5a -1pr2 -prac1 -pr2a5d -pra2r4 -4p4re. -pr2e -pre1b3 -p2r4e1d -pr2ef5er2e -pre2fe -pre1f -prefer1 -prel5a4te -pre1l2a -3prem -pre5mat -pren3 -pres3a -pre5scin -pre2sc2 -pres1c2i2 -p3rese -5pressi -pre4ss -5pri1c2i2 -pri4es -prie2 -4pri4m -pring5er1 -pr4inge -pri4ng -pr4in2g5i -4pr2i3o2 -p5r4i1ol -pri4os -pri2s5in -pr2i4v2 -4pr2iva -4p4ro. -pr4o3bo2 -p3roc3a -pr2oc -pro4ch -pro1l -pron4a -pro4ph5e4 -pr2op -pro3pyl5 -pro2p4y -pro3r2 -pros4i -pros5tr2 -prost2 -pr4o3th -pr2ot -4p2r2y -2ps2 -p3sac -ps4al5t -psa2l -p3s2h -p1si -p5sin. 
-p2sin -p1s2o3m -p1st2 -psul3i -p1s2ul -3psy1c -ps2y -2pt2 -p2t3ab -p4tad -p4ta2n -p2ta2r -pt5ar1c2 -p1t4ed -p5ten1a -pt5en1n4 -5pte2r2y -pter1 -p5tet -pt4ic -p5tie2 -p3til -p2t3in -pt4ine -p3tise -p2t1is -p5tisi -p5t2om -p4tr2 -p1tu -pub1 -pu5b2e -p4uc4 -pu4ch4 -pudi4c -pu5er1 -puff5er1 -puf2 -pu2ff -puf1fe -pu4la2r -pu1l2a -pu5lar. -pu5l1is -pul2i -p4u4m -pu1m4o -p4un1 -pu4n4a4 -3punc -pun5g2i -pu4ng -pun3i -pun2t -pu3pi -p2up -pur5b -pu4r -pur3c2 -p4us -push4ie2 -pus2h -pu3tat -p5u5t1is -put2i -pu3tr2 -4p1w -2p4y -py3e -3py1g -3pyl -pyr3e -py5t -4qf -qu4 -5qu2ak -q2ua2 -4qua2r -qua5t2io2 -2que. -3quer3a -quer1 -4qu2er2e -4qu4es. -1que4t -5quin1a -q2ui2 -5qu2ir -3quito -4quitu -4ra. -r2a3ba -r1ab -5r2ab2e4 -3r2a3bin -r2abo2 -ra3bol -rac4a -r2acu -rac5u1l2a -ra5cu1lo -r2ad -ra4de -rad4in4e -ra2d1in -ra2g5o4u2 -ra3gr2 -3raill2 -r2ai2 -ra5ist2 -4ral2i1a2 -r2a3ly4 -r5a4m1n2 -r2a2m -ra3mu -r4andi -ra2n -ra4nd -ran5dis2h -randi4s1 -ran4du -ra5nee -ran4gen -ra4ng -ra3n2i1a2 -ran1i -ra3n2oi -ran1o -ran2t -ran5t4ed -ran2te -5ran3te2l -rant5in -rant2i -ran1t5o -rapol5 -r2ap -ra1po -rap5to -ra2pt2 -4rar1c2 -ra2r -rar2e2 -rar3e1f -rar5ia. -rar1i -rar2i1a2 -ras2 -r2as3c2 -r2as2e -r4as2k2 -r2a3so -ras1s5a -ra4ss2 -ras2s5in -r4as5te2 -rast2 -ra5t2a3p -r2a2ta -ra5ta3t4 -rat5eu -rath4e2 -r4a2th -ra2t3i1f2 -rat4in. -ra2t3in -ra5t2oc -ra2to -5r2a5tol -4r4at2om -ra4tos4 -ra5t2u1i2 -r2a2tu -rat5u4m -rat3u4r -rav5ai2 -r2av -rav5eli -ra2vel -rav3i2t -rawn4 -ra3z2ie2 -r2az2 -raz1i -r1b -r2ba -r4bag -rb3ali -rb1a2n -rba2r3 -r2b2e -rbe5c -r3bel -rbel5o -rb3ent -r4be1s2 -rb2i -rbic4 -rb2ic5u -r2bin -r5bine -rbit1 -r2bos -rbo2 -r4bum -rbu5t4 -r1c2 -rcant5 -rca2n -rca4s -r4c2ele -rce2n5er1 -r1cen2 -rc4ene -rcen5ten1a -r3cent -rcen1te -r2ces -r3ch2a3i2 -r2ch -rcha2 -rch3al -rch5ar4d -rcha2r -rch5ate -r3cheo -rche2 -r4ch1er1 -rch4i1er1 -rchie2 -r4ch1in -rch3is -r3chit -r3cil4 -r1c2i2 -rci5n2o1g -rc2ino -rcis2 -rciz4i2 -rc1iz -r2cl2 -r4c2le2 -r5clo -rco1lo4 -r1c2o2 -rcrit5 -rcr2 -rcriti4 -r2ct4 -rc5ti -r5d2a2m -r4d1a2n4 -rd4an. -r2da2r -r5de4l -r3de4n2s -r4des -rd5e4ss -rd5ia2n -r1d2i1a2 -r4die2 -r5d2ig -r2d2in -rd3i4ng -r3d2i3o2 -rd1i4s2 -rd5ler1 -r2dl4 -rd3li -r4dol -r1do -r2d5ous -rdo4u2 -r2e -4re. -rea4 -r4ea. -reac2t5iv -re1a2ct -re3af -re3a4g -re5alt -re5a2m1b -re2a2m -re3an5i -rea2n -re5ant -re5asc2 -re2as3o -r5e2au3 -3re2av -r5ebrate -re1b -re2br4 -reb1ra -re4b5uc -re3c1al -rec2a -rec4ce -rec1c4 -re3ce -reced5en -re2c2ed -re3cede2 -re3cha2 -re2ch -reci5si4 -re1c2i2 -r4e1c2r2 -rec4t3r2 -re2ct -re3cu -2r4ed -re1de2 -re3di4s1 -re4dol -re1do -re1dr2 -reed5i -re2ed -ree3m -3reer1 -re2fe -re1f -re3fin -re5gali -re5gra -re2gr2 -r2e3g4r2e -reg3ri -re3g4ro -reg3ul -rei4 -r4e3i1f -re1in -r4e3is -reit3 -reit4i -re1l2a -r2e1le -4r4e4l2ed -re3l2i1a2 -rel3ic -re5l2ig -rel2i4q -rel3li -rell2 -r5em. -rem5ac -rema4n4d -re1ma2n -rem5a2to -r3em1p -rem5ul -ren1a4 -ren5at -r4endi -r2e4nd -r4ene2 -ren4es -r4en1i -ren3ic5 -ren4it -ren4ter1 -ren1te -re5num -r4en2u -re3oc -3re2o1g -re5ol2a -reo2l -re3oli -3reo1s2 -re1pe -re4per1 -re5ph1 -rep5i2d -re3pin -re3ple -rep2l2 -r2e4pr2e -re1pr2 -re1q -rer4a -rer1 -r2er2e4 -re5rea4 -r2e3r2u -2r4es. 
-re3scr2 -re2sc2 -re3se4l -re3sem -re3ser1 -res5ist2 -res1is2 -re5s1it -re3spe -r3esq -re5stal -rest2 -rest5er1 -reste2 -re5s1tu -3reta2r -ret2a -re3ten -re4t4er3 -re5term -re1t2o -re5ton -re3tra -retr2 -r2e3tr2e -re5t1ri -re3tu -re3un1 -reu4r4 -re1v -re2v3el -revi4t -r1f -rf4l2 -rfu4m -r3fu -r1g2 -r4gag -rgal4 -r2ge -r5gee -r4g4ene -r3gen -r3ge4o -r3ger1 -rg5li -r2gl2 -rgu5f2 -rh2 -r5hel4 -rhe2 -rhe5o2l -rhos4 -3r2h2y -4ri. -ri3a2m -r2i1a2 -ri5ap -2r2ib -r4i3bo2 -ric2a5t4u -ric1at -2r4ice -rich5om -r4i2ch -ri1cho -rick4en -r2ick1 -ric4ke -r4icl2 -ri5cli -ri3col -r2i2c2o2 -ri5cor -ri4cra -r2i1cr2 -2r2icu -rid4al -ri2d -rid1a -rid4e -ri5el -rie2 -ri3er1 -ri2es -rift5er1 -r2i1f -ri2ft -rif5tie2 -5rifug4a -ri3fu -ri5g2a2m -r2ig -rig5ant -riga2n -ri5l4a -r4ile -rill5er. -rill2 -ril2ler1 -rill5in2g1s -ril1li -ril2lin -rilli4ng -4rim. -ri2ma -rim2a4g -ri1m5a2n4 -rim3at -r4i2m1b -ri1men4 -rim2e -4ri2m1m2 -4ri4m1s -rin4e -r4inet -ring5ie2 -r4in2g2i -ri4ng -rink5er1 -r4i4n2k -r4ino -ri4n4s2 -rin3s5i -rin4t5er1 -rin1te -r2i3o2 -ri2o4g -5rio2ne -ri4op -ri5or -ri5p2a -r2ip -ri5p2ie2 -rip5lica -rip2l2 -r2i5r -ri2s4c2 -r4is4is2 -r2is1p -ris4pa -ris4pe -ris5ter1 -rist2 -riste2 -4risti -r2i3ton -r5it5r2 -r2i4v -riv4al -r2iva -ri5vall2 -riv5eli -ri2vel -riv3en -riv3i4l -5r2i5zo -r1iz -r1j -r2k -r5kas -rk5ati -r5kell2 -rk5en1i -rk1er1 -r3ket -r3key -r3ki1er1 -rkie2 -r5ki1es2t2 -r5k2in. -rk1in -r5k2i4n1s2 -rks4m2e -r4k1s -rk2s1m4 -r1l2a -rlat3 -r1le -r3l4ic -r3l4ine -r5li4n1s2 -r4l1it -r1lo -r3mac -rma5ce -r5mad -r2mal -r4manc -r1ma2n -r4man1o -r4mar1i -r1ma2r -r4ma2r2y -rm4as -r4m3ati -rma5t2oc -rma2to -r5m2a5tol -rme2a2 -rm2e -r2m1ic -rm4ica -r5m2i2d -rm4ie2 -r5m2ig -rm2il5 -rmin4e -rm3i4ng -r4ming. -r4mi4te. -rmi2t -r3m2oc -r1mo -rmol4 -r1mu -rmu3l2i -r2n2 -rn3ab -rn1a -r3na2c -r5nad -rn5a2r -rn3ate -rn5a2t3in -rnati4 -rn5e2dl4 -r2n2ed -r3nel -r3ne4ss -rn5est2 -r3net -r3ney -r5n2i1a2 -rn1i -rn5ib -r3nic -rn3in -rn4ine -r1nis -rn3ist2 -rn2i5v -rn3iz -rn5n4 -r3n2oc -r5n2o1g -rnt4 -r5n4uc4 -rn2u -r5nut -4ro. -ro4b2e -rob3le -ro2bl2 -ro5br4 -5roc1c4 -r2oc -ro3cu -r2od -ro3do -ro3dy4n1 -ro2d2y -ro1fe -ro3gn2 -r2o1g -4r2oi -ro3i4c -ro2i4d3 -ro3l2a -r4o2l2ed -rol5ite -rol2it -ro3ly -romant4 -r2om -ro1ma -ro1ma2n -ro5mel -rom2e -ro3m2i2t -rom1i -romole2c5 -ro1mo -rom4p -ro3mu -ron4a2c -ron1a -4ronal -ro5nate -ron5ch -ron1c -ron4do -ro4nd -ron2g5i -ro4ng -r5onm2e -ron3m -ro1no -ron4ton5 -r2ont -ron1t2o -r2oo4 -1ro2o4m -5ro2o2t -r2op -4rop. -ro3pel -rop4in4e -rop1i -r4o1pr2 -r5opte -r1o2p1t2 -ror5d -4r2or2e -r4osa -ro3s2i4a2 -ro5s2ol -4ro4ss -ro5stat -r4osta -rost2 -ros4ti -ros5tit -ro3tat -r2ot -ro1te -ro4ter1 -ro3tu -5r4oue -ro4u2 -roul3 -round5er1 -r2oun1 -rou4n2d -rounde2 -rou5sel -r2ouse -4rou4ss4 -r4out -r4ow -row3er1 -4ro4x -rpas2s5in -r1p4as -rpa4ss2 -rp3at -rpe2 -r3pent -rp5er. -rper1 -r2ph -rph5e4 -r3phol -r1pho -rp3i4ng -rp5is -rpol3a -r1po -r2p5o4u2 -rpr2e4 -r1pr2 -rpre4t5er3 -r3pu -r1q -4r1r4 -rr2a4h -rran5g2i -rra2n -rra4ng -rr2ap4 -rre2l -rr2e -r4reo4 -rrhe3 -rrh2 -r3ri -rric4 -r2r2icu4 -rri4fy. -rr2i1f -rri3fy -rr4in5ge -rri4ng -rri4os -rr2i3o2 -rrob3 -rr2o1g5 -rr2o4t -r5ru -r2r2y5 -r3ry1i -r3rym -2r1s2 -r4sa4g -r2sa2l -r5sal1is -r5sal1iz -r2sa2n -r4sa2r -r2se -r3se2a2 -r3sec -rsel4 -rsell5 -rs3er. 
-rser1 -2r1s3e2r1s2 -r3set -r3sha2 -rs2h -r3shi -r4shie2 -r5s2i2a2 -r4s3ib -r5sie2 -r4sil -rs3i4ng -r2sin -r3s2io4 -r4s1it -r4s3iv -rs5li -r2sl4 -rst1or4 -rst2 -rstrat4 -rstr2 -r3su -r4sus -rswea2r4 -rsw2 -rswea2 -rt2 -r2t3ab -rta4g -rt3age -r3ta2r -r4tar2e -r2t3c2 -r1t4ed -r4te2dl4 -r3te2l4 -r5t2e4nd -rt3en1i -r5ter3er1 -r2t2er2e4 -rter1 -r5tet -r5teu -r4th4ene -rthe2 -rth2i -rth5ing. -rth1in -rthi4ng -rth3ri4 -r1t2h2r -r1t4ic -r4ticl2 -r5t2i1et -rtie2 -r5ti2l3a -r5t4ill2 -rtil5le -rt5i2ly -r2t1in -r3tin1a -rt3i4ng -r3titi -rti5tu -r2t3iv -r2t1iz -rt5let -r2tl -rt3li -r1t4o -rto5l -rt5ri2d -rtr2 -rt5s2i -r4t1s -r1tu -r4tus -rtwis4 -rt1w -rt2wi2 -r2u3a2 -r4ub2e -rub3r4 -ru4ce -r4uc -r2ud -rue4l -r4uf2 -ru3in -r2ui2 -ruis5i -ru2l -r4um2e -r4u1mi -ru4m2or2e -ru1mo -run4c2l2 -run1 -run1cu4 -runcul5 -ru4n2d4 -run2e -ru5net -ru4n4g -run4t -r2u2p -rup5lic -rup2l2 -ru3pu -rur4i -ru4r -rus4p -rust5at -r4ust2 -rust5ee -ruste2 -rus5t4ic -rus4t5u -ru3tal -ru3t2i -r1v2 -r4vanc -rva2n -r2ve -rvel4i -r2vel -r3ven -rv4e1n4e -rv5er. -rver1 -rv5ers. -rve2r1s2 -r3vest2 -r3vet -r3vey -rvi4t -r1w -2r2y -ry5er1 -5ry2g1m4 -ry1g -ry4go -ry2m4b -3ryn1go -ryn1 -ry4ng4 -4ryn4gol -ryp5a -r2yp -ry2t -ryth4i -r2z -2sa. -2s1ab -s3a2bl2 -5sack1 -sac4q -s3a2ct -sac4te -sad5i -sa2d5o -5sae -sa4g -3s2ai2 -sain4t -5s2ak -sa2l -sa5l4ac -sal2a -3s2ale -sa3lie2 -s4al4t -sa3lu -s2a4m -sa5min -sam1i -sa1m5o -sam2p4 -san3a -sa2n -san4d2ed -sa4nd -sande2 -s4an4e -san5ga2r -sa4ng -sang2a -san5i3f -san1i -2sant -sant5ri -san1t4r2 -s3ap -sa2p3r2 -sa2r5s2 -sa2r -3sas. -sa4s3s2 -sassem4 -s2a1t -sa2te -s5a2t1iv -s5a4to2r2y -sa2to -sat1or -s2a2t1u -1s2au2 -sau5c2i2 -s4a4uc -sau4r5 -savi2 -s2av -sa3vo4u2 -sa1vo -4s3b -s4bei -sb2e -sbe4s2 -s2by3 -sc2 -s1ca -sca5len -sc1al -sc2ale -s1c2a2p -scar4c2 -s1ca2r -sc2av3 -s1ce -s4c2ed -4scei -4s4ces -s2ch2 -sci2d5 -s1c2i2 -s2c2o2 -scof4 -s4c2oi -3s4cop4e -5scopic -scop1i -5scripti -scr2 -scr2ip -scri2pt2 -2s1cu -4sc4u4ra. -s1cu4r -scur1a -4scuras5 -2s1d2 -2se. -se2a2 -s4e2a2m -seas4 -sea3w -sec4a -sec5a2n -se2c2o2 -secon4 -2s2ed -se4da -sed4it -3se2ed -3sei -se2i3g2 -5sel2a -4s2ele -se3lec -selen5 -5self -2s4e1m2e4 -sem2i -semi5d -se1m4o -se4n5g -3se4n2s -sen5sati -sens2a1t -sen5sor1i -sent5ee -sen1te -5sen4t3m -seo5l2o1g -seo2l -seo1lo -se2p -se1p3a -sep4si -se2ps2 -3se2pt2 -sep3ti -ser4a2n -ser1 -se5r4en4e2 -s2er2e -ser3en -ser4t4o -sert2 -4ser1vo -ser1v2 -s2es -4s4es. -se5s2h -s5esta -sest2 -1set -5s4eum -3sev -sev3e4n -se1wo4 -3sex -sex1o2 -3sey -2s1f -sfac2t5o -s1f2a -sfa2ct2 -sfi4 -sf2or5e -s1fo -sfra2n5 -s1fr2 -2s1g4 -s2h -4sh1ab -sha2 -sh4abi -sh1er1 -she2 -sh5et1 -shil5li -shi4l -shill2 -sh5i1ne4ss -sh1in -sh2in2e -shine4s -sh3io2 -5sh2i2p -s3h2o4n -4shu4 -sh4y2s4 -sh2y -si4all2 -s2i1a2 -siast5 -4s1ib -s3ic1at -3sic1c4 -2s5icl2 -s2i4cu -si5cul -s4i2d -4sid. -si4de -side5l -sid3en -si1d5eri -sider1 -4si4d1s2 -5sid5u4a2 -si1du -si4e2r1s2 -sie2 -si1er1 -s2i1f4 -si2f5f -s2i4g -1sili -sim4p4ly -sim1p -simp2l2 -2sin -s2ine -sin5et -5sing5er1 -s4inge -si4ng -s2in3i -5s4i4n2k -si5nol -s2ino -si3nus -s2in2u -1s2io4 -4sio. -si5o5s -3s2ip -si4pr2 -s1is2 -4sis2h -4si2s1m4 -s4ist3a -sist2 -si4s1t3o -s1it -si4te -sit5om -4s1iv -5s2iva -s1j -s2k2 -4sk. -s5ka2r -ske2 -s3ket -s5key -s3ki1er1 -skie2 -s5ki1es2t2 -sk5i2ly -ski2l -sk5ine4s -sk1in -4s4k1s -s3ky3l -2sl4 -slan2g5i -sl2a -sla2n -sla4ng -s1lat -3sl4au2 -slav5eri -sl2av -slaver1 -s2le -s5lea2 -s3let -s5ley -s3l1it -s1l2o3c -slov5 -s5l4uc -2s1m4 -s3ma2n -smas4 -s3men -sm2e -sm2i3g -3sm2ith -smi2t -smo4d1 -s1mo -smu5ta1t2io2 -s1n2 -s2n1a -2so. 
-2s3od -so2d3o -so2d2y4 -3soe -4s3o2i4d -s2oi -s2ol -sol3a -so5l4a2n -so2l4er1 -so3lic -3solve -solv5er1 -1s2o2m -soma5to -so1ma -so3mat -3so2me. -som2e -so5met1e -so1me2t -so3mo -s2on1a -son5at -s4o2ne -son5or -s2o2p -4sor3ie2 -sor1i -5sor2i3o2 -sor4it -s5or1iz -sor3o -s3o2r2y -sos4 -4sose -s4o5th -s2ot -3so4u2 -sov5e -so3vi -spas1t4 -s1p4as -spens5a -spe4n2s -4speo -3sperm -sper1 -s5pero -spers5a -spe2r1s2 -sph2 -s3pha2 -3spher1 -sphe4 -spic5ul -sp2icu -s2pi2d -sp5id. -s5pi1er1 -sp2ie2 -spil4l2 -sp2il -s2pin -sp3i4ng -sp2i5n1i -spital5 -spit1a -s1p2l2 -sple2 -s4p4ly -s2po -5sp2om -spon5g2i -sp4o2n -spo4ng -3spo4n1s2 -3spoon -sp2oo -spr2u5d -s1pr2 -s4p4y -s1r -sr2e2 -sreg5 -sre1p5u -sre4s -4ss -s1sa -s5s2a4m2 -s1sel -s5se4n5g -s3sent -ssent5er1 -ssen1te -ss3er. -sser1 -s5seri -ss3e2r1s2 -s5seu -s3sev3 -s3s2i1a2 -s1sic -s1s2i1f4 -s2s1in -ss4in. -s4s2ine -ss4is. -ss1is2 -s3s2it -ss4ivi -s4s1iv -ss5li -s2sl4 -s2s3m4 -s4s1n2 -s1so -ssol3u -ss2ol -ss4ol1u4b -s4s2or2e -ssor5ial -ssor1i -ssor2i1a2 -ss5po -s1su -ss3w2 -st2 -4st. -s2t1ab2 -sta3bi -4s1t2ak -s4t2ale -stan2t5iv -sta2n -stant2i -s3tas. -5stat1i2c -s2t3c2 -ste2 -ste5a2r -stea2 -ste5at -s4te1b -s4tec -4s1t4ed -s4te2dl4 -s4te4d1n2 -4s2t2er2e4 -ster1 -ster4i1a2 -s1teri -s4tern. -ster2n2 -s3tero -st5es4t2 -s2t2es -s1th -s4tha4 -s4thu -s3t2i3a2 -3st2ick1 -st1ic -s3t2ic1u -stil5ler1 -st4ill2 -s4ti2ly -st3i4ng -st1in -5s4t2ir -s5t1iz -4s2tl -st3ler1 -st3li -s4toe -3ston -sto2n4e3 -ston4ie2 -ston1i -s5torat -st1or -stor1a -st4or5ia2n -stor1i -stor2i1a2 -s4tose -s2to4u2 -s4tr4ay -str2 -str2e4 -strep3 -3st4r4uc -str2u5d -2s4t3s -s1tu -s4tud -stu4m -stur4e -stu4r -4st1w -s4ty -1styl -4su. -su5a2n -s2ua2 -su4b1 -su2b1t2 -su2ct4 -s4uc -sud4a -su3e4t -su2f3f -suf2 -sug3 -3s2ui2 -su2i5c -su5i4ng -1s2ul -s4u2m -su1m3i -su4n4a4 -sun1 -su5pe -s2up -su3pin -supra3 -su2pr2 -sur4as5 -su4r -sur1a -sur3c2 -s4ur1g2 -sur3p2l2 -su5su -su5z -2s3v -svers5a -sver1 -sve2r1s2 -sves4 -sve2st5i -svest2 -sw2 -5swee -swel4l5i -swell2 -4sw4e2r4ed -swer1 -sw2er2e -2s1wo -s2y -4sy. -sy4b2i -s4y1b -sy1c -sy4ce -sy4c2hr -sy2ch -sy4d4 -1syl -3syn1 -syn5e -sy5pho -s2yp -sy2ph -syr5i -2ta. -2t1ab -ta5blem -ta2bl2 -3tabli4 -t2abo2 -ta3bol -ta4bo4u2 -t4a3ce -ta5ch2om -ta2ch -ta1cho -ta3ch2y -ta4ci2d -t2a1c2i2 -t5ade -tad4i -5t2ad1j -ta5d2or -ta2do -tad2r2 -tae5n -taf4 -tage5o -ta5g2o1g -3ta2gr2 -3t2ah -1t2ai2 -3tail -2ta2ir -t4ais -1t2ak -tal2c2 -tal5ent -t2ale -ta5lep -t4al2i1a2 -t4al1in -tal4l3a -tall2 -5tal1lu -t2alo4 -t2a3ly4 -tam5ar1i -t2a2m -ta1ma2r -5ta3me2t -tam2e -tamor2ph5 -ta1mo -ta3morp -tan5at -ta2n -tan2a -tand5er1 -ta4nd -tande2 -t4ane -5tanel -tan5ie2 -tan1i -t5an1iz -ta2nt5a2n -tan2t2a -t4a4pa -t2ap -1tard -ta2r -tar5ia. -tar1i -tar2i1a2 -tark5i -tar2k -tar3n2 -3t2a4r1r4 -tas3i -t3a2s1m4 -5ta4ss2 -tas4t2 -t2a3sta -tast5i4c -t4ateu -3ta2t1is -t4a2to. -ta2to -tat4o4u2 -t2a2t4r2 -ta1t3ut -t2a2tu -tau3t2o -t2au2 -t5awa -tawn4 -t4a4x -4t3b -2tc2 -t1ca -tcas4 -tch5e2t2t4 -t2ch -tche2 -tchet1 -tch5u -4t1d4 -4te. 
-te5cha2 -te2ch -5tec2h1n2 -te3cr2 -t4ed -te5d2a -4ted1d4 -4te1do -4tee1i -te2g -5tegic -te1g2i -t3ego -te2g1r2 -teg3u -tei4 -te2l -4t4e4l2ed -t2ele -tel5iz -1tell2 -4te3lo -3te4ls -tem3a -4te1m2e4 -t4e5m4on -te1mo -ten4ag -ten1a -4te2n3a2r -4t4ene -t5en3m -5tenn1a -ten1n4 -4ten1o -te5n2o1g -tent4a -te2o -teo5l -2tep -te3pe -tep5i -tera4c -ter1 -t4era4g -t4era2to -3ter3b -5t2erd -2t2er2e4 -ter3e1b -ter5ec -5terel -te3reo -3tere4s4 -1teri -ter3i1a2 -ter5i2d -ter5i1f -t4er3in -ter5iorit -ter2i3o2 -teri5or -terior1i -t4er3i2t -ter5k4 -5tern3it -ter2n2 -tern1i -ter5no -3te4r1r4 -2t2es -4t4es. -tesi4 -t3esq -t3ess. -te4ss -t5ess2es -tes4t2 -test3a -5teste2 -test5er1 -test5in -te2sti -test5or -tes5tu -teti4 -tet1r2 -tet1r5o -tew3a2r -te1wa -3tex -2t3f -t3g -2th. -tha4 -th5al. -thal3m2 -4t4he. -the2 -4th2ea2 -th5eas -4th2ed -1th4ei -3theo -theo3l -t4her1 -5ther2ap -th5er1c2 -t5h2erd -4th4e2r4ed -th2er2e -th3er2n2 -th3e2r2y -4t2hi. -t5hill2 -thi4l -3th4i4n2k -th1in -5th4io2 -th4is. -th5lo -t4hl -2t2h1m2 -th4mi -th3oli -4t5h2oo -4th1o2p1t2 -4thores -th2or2e -3th2ot -5thoug4 -th2o4u2 -1t2h2r -2t2h1s -5thu4r -5thy2m -th2y -3thyr4 -th4y2s4 -4ti. -1t2i2a2 -ti3ab -2t3ib -5t4i5bu -t1ic -t3ic. -tic5as -t2i1c2i2 -tici5a2r -tic2i1a2 -3t4i3cin -t4icity -ti3col -t2i2c2o2 -t2ic1u -4ticule -t3id. -ti2d -t4id1a -3tidi -ti3die2 -t5i4d1s2 -3t2i2en -tie2 -1t2i1f2 -ti3fe -4ti2ff -4ti4f3ic. -3t4i1g2i -t2ig -ti3g2i5o2 -4ti2g1m4 -5tigu -ti4ka -t2ik2 -ti4let -5til1in -t4ill2 -til4l5ag -til1l2a -t4ilt -1tim -tim1a -5ti1me2t4 -tim2e -t1in -5ti5nad -tin1a -4t4i2n2ed -tin3et -ti4ng5i4ng -t4in2g2i -ti4ng -3t2in1n4 -4ti4n1s2 -t4int -tin4te -tin5t4ed -tint5er1 -tin3ue -t2in2u -1t2io2 -ti3oc -tiol3a -t4i1ol -ti5o3mo -ti2om -4tionem -tio2ne -1t2ip -ti5plex -tip2l2 -ti3pli -t2i4q -ti5q2ua2 -tiqu4 -t3iris -t2ir -tir1i -2t1is -3tis1a2n -ti4sa -ti2s4c2 -tish5i -tis2h -3t2i4s1s -tis2t2 -5t4iste2 -t4is1tr2 -ti5t4a2n -tit1a -tith4e2 -t2ith -tit5il -t3i2t1is -3ti2tl -ti3tra -t2itr2 -3t2i1u2 -2t1iv -tiv5all2 -t2iva -t3ive -tiv3is -2tl -t1l2a -tlant4 -tla2n -5tle1b -5tle5dr2 -t2l2ed -3tle1f -3tlem -5tlen -5tletr2 -t1let -5tlew -t1li -tlin4 -4t3m -t1me2t2 -tm2e -tm2o4t5 -t1mo -2t3n2 -t4n2er2e -tn1er1 -2to. -toas4 -t2oa2 -to1b -4toc1c4 -t2oc -to3de5c -tod4i -to5do -3toe -1t2o1g -2t3o2i4d -t2oi -5tok -4to2l2ed -tol4l2 -tolu5 -to5ly -to2m3ac -t2om -to1ma -to1ma4n -tom2at5ol -to3mat -toma2to -to2m4b -to4m2o1g -to1mo -tom5os -to2n4e -t4on5ea2 -3ton1n4 -to4n3s2 -top4e -to5p2i1a2 -top1i -to4p4os -to1po -t1or -to5r2ad -tor1a -4t2or2e -tor5er1 -tori4as -tor1i -tor2i1a2 -to4r5oi -tor5p -tor4q -3tos. -t4o3s4p -tos4t2 -to5str2 -to5tal1is -t2ot -to5tal1iz -to3tem -tot5u -tou4f2 -to4u2 -5tou4r -t3ous -4tov -to3wa2r -t3p -tr2 -tra4c2o2 -4trad1d4 -tr2ad -4tra5ist2 -tr2ai2 -tra5q -trar2ch4 -t4rar1c2 -tra2r -tra5ven -tr2av -tra5ve2r1s2 -traver1 -trav5est2 -3tr4ay -4t4re. -tr2e -4t2r4ed -tre4mo -tren4 -tr4end5i -tr2e4nd -tre5pr2 -tre4s4s -4trew -t5r4icl2 -3t2r2icu -t2rie2 -tri5fli -tr2i1f -tr4i2fl2 -t5rifu5g4a -tri3fu -2tril -tri3li -tri3m2e -t2rit -4tr2ix -t4r2od -tro5f -5troo2p -tr2oo4 -tro4pha2 -tr2op -tr4o3sp -t2r2ot -t5ro1t2o -tro1v -3tr2oy -t4r4uc -tr2u3i2 -2t4r2y -tr4y2s4 -4t1s -t2sc2 -ts4h -ts2i -t4sil -tst4ay4 -tst2 -2t1t4 -tta4 -t3t1ab -t5ta2n -t5tas -t3t4ed -t4t2er2e4 -tter1 -t5ter3er1 -t5tes4t2 -t2t2es -t3ti -tti3tu -ttitud4 -ttitu5di -t3t2ler1 -t2tl -t3t1li -t5t2oi -t5t1or -t3tos -t4t5s -t4tu1pe -tt2up -t2ty -4tu. -t2u1a2 -tu4al5li -tu1al -tuall2 -tuar3i4 -tua2r -tu4bin -tu1b2i -tu5bu -tu5den -tud4e -tud5ie2 -tu5en -4tuf2 -t2u1i2 -tu4is -2tum. 
-3tu1mi -4tu4m1s -3tun1 -tu4n4a4 -tu4ne -tun5it -tun2i -tup5let -t2up -tup2l2 -tup5lic -tu5rac -tu4r -tur1a -t4ura2n -tur2b3a -tur1b -tur4d -turf5i -tur2f -5turit -tur1i -tur4n2 -5tur5o -1tut -4tu2t4iv -tut2i -t1w -t3wa4 -t2wi2 -twi5li -t3wit -t3wo -twon4 -4ty. -ty4a2 -5ty2ch -ty4let -tyle2 -tyl5i -ty5mi -1t2yp -3type -1tyr1 -2tz2 -t5z2i1a2 -tz1i -t5z2ie2 -2ua2 -u2a3c2i2 -u2ag -u2a5h -u1al -ua5lu -uan4o -ua2n -uan2t5is -uant2i -uant5it -uar3a -ua2r -uar2d -uar3i -uari4n -uar5te2r1s2 -uart2 -uarter1 -uar4t5i -ua5ter2n2 -u4ater1 -uba4 -ub5b2ly -u2b1b2 -ub2bl2 -u1b2i -u4b2icu -ub3lin -u2bl2 -ub5lo -ub3ra -ubr4 -4uc -u1c2a -uc1cen5 -uc1c4 -u4c2e4nd -u1cen2 -u4ch -u5c2hr -uc3l2 -u4c2om -u1c2o2 -uc2o5t -uc2tr2 -u2ct -uc3ub -uc5ul -u5cum -u5d2ac -ud1al -ud4e -ud5e1p -u4der1 -udev4 -ud4g -ud4i4cin -udi1c2i2 -ud3i2ed -udie2 -u5dinis -u2d1in -ud2in1i -u3d2i3o2 -u5di1t2io2 -u2do -u5d2oi -ud5on -u5d2or -ue1b4 -u4ed -uen4o -uen4ter1 -uen1te -uer3a -uer1 -ue4s4s -uest5rat -uest4r2 -uest2 -ues5tri -ue4t -uf2 -3u1f2a -u3fl2 -u4fo -u2ft4 -uga4c -ug5l2i1f -u2gl2 -ug2n1i -u2gn2 -u4go -ug3ul -ug3ur1a -ugu4r -uhem3 -uhe2 -2ui2 -ui3al -u2i1a2 -u2ic -ui3cent5 -u4ice -ui1cen2 -ui1d5o -ui2d -ui2l4a -uild5er1 -ui3lib -uil4t -uin1c5u -u2inc -ui4n4s2 -uint4 -uin4ta -ui5pr2 -u2ip -uis3er1 -uis4t2 -uisti4 -uit5er1 -ui5val5 -u2iva -ui3vo -u2iz -4ul. -u1l2a -u4l1ab -4ul4ac -ul5ard -ula2r -u5lat -ul4bo2 -ul3b -ul3ca -ul1c2 -ul4ch -5ulch4e2 -5ulchr2e4 -ul3c2hr -4ulea2 -u5lee -u1len4 -4ulen1c2i2 -u5lent -u1let4 -u2l4ev -ul2f2a -ul2i -ul4i1a2 -u3l4ine -ul3i4ng -ul5is2h -ul1is -u5liti -ul1it -u5lity -4ull2 -ul4lat -ul1l2a -ul4l5ib -ul1li -ul4l1is -ul4l1it -ul3m2 -u1lo -u5l2om -ulph3i -ulph2 -ul2ph3o -ulp5i4ng -ul4po -2u4ls -ul3s2i1f4 -u1lu -ul1v4 -u1ma -u2m3a2m -u1ma2r4 -u5mas -um4bar. -u2m1b -umba2r -um2b2i -umen4t -um2e -u1men -u1mi -u4m1ic -u2m5i1f -umi4fy -umi5l2i1a2 -um2il -umin4a2r -umin1a -u4m4i2n2ed -u4m3i4ng -u4mor1a -u1mo -u4mos -um2p -um4pa -ump3er1 -ump5li -ump2l2 -um2pt4 -ump5te -u1mu -umu4lo -un1 -u4n3a4 -un5ab -unabu4 -un4ae -un4as. -un2as -un2ce -un4dal -u4nd -un2da -un3d2ed -unde2 -un1de4t -undeter5m -undeter2 -un1di4c -un4die2 -un3do -un4dus -un1du -u3n2er1 -unho5li -un1h2 -unho4 -un2i -u1nic -un4ie2 -un3in -un4ine -un2i5p -uni3s4o -un3ist2 -un2i1v -un3iz -unk5eri -u4n2k -unker1 -un5ket -un3kn2 -2un1n4 -un4nag -unn1a -un5o -un5r -u4n3s4 -un5s2h2 -un2t2i -until4 -un2u4 -un3us -uo3de -uo3dent4 -u5oros -u3os -uo5t4a1t2io2 -u2ot -u1o4u2 -2up -u1pat -u1pe -u5p4ee -uper3 -u1ph -u5pi2d -up3i4ng -u4po -u5pol -u2pr2 -upr2e4 -u5que4t -uqu4 -u4r -ur1a -4u4ra. -ur2a4c2i2 -4urae -ura2g -4urant2i -uran2t -ura2n -uras5 -urb5i4ng -ur1b -urb2i -ur2bin -ur2c2 -urc3a -ur5den. -ur5den1i -ur5die2 -ur4du -ur3ea4 -ur2e -ur5ee -ur1er1 -ur3e2r1s2 -ur1e2t -ur3e2t2t4 -ur2f -ur3f2a -ur1i -u5r2i5cu -ur4ie. -urie2 -ur5ifie2 -ur2i1f -uril4 -ur4ili -ur5ion -ur2i3o2 -uri4os. 
-ur2l5er1 -ur1le -ur5lie2 -url5i4ng -ur1m4 -urn3al -ur2n2 -urn1a -urn3er1 -ur4n5s -ur1o -ur2o4d -ur5o4m -ur5ot -uroti4 -ur3pen5t -urpe2 -ur2ph4 -u2r2s2 -ur2s5a2l -urs5er1 -ur2se -ur3s2h -urs3or -ur5ta -urt2 -ur1te -ur5t2es -urth2 -ur3the2 -urti4 -ur1u -ur4va -ur1v2 -u3sad -us3a4g -us3a2l -us4ap -us3a1t -2usc2 -us4ca2n -us1ca -ush5a2 -us2h -us5ia2n -us2i1a2 -usil5 -u4s1in -usk5er1 -us2k2 -uske2 -us1p -us4pa -uss4e -u4ss -4ust2 -us3tac -us5ta2n -ust4ic -us5t2i1c2i2 -ust5ig -ust3il -us1to4 -us1tr2 -us4tr2e4 -usur4e -usu4r -us5ur1i -u3t4ane -uta2n -utch4e2 -u2tc2 -ut2ch -ut5en1i -u5te2o -u4t2er2e4 -uter1 -ut2i -u3tie2 -ut3i4ng -ut1in -u5t2in1i -u3t2io2 -ut5i2s1m4 -u2t1is -ut3is2t2 -5u5t1iz -ut3le -u2tl -ut1li4 -ut2o -u4to5s -u4t1ra -utr2 -u4t1s2 -ut5s1m4 -ut4to2n4e -u2t1t4 -u3tu -u4tul -uu4 -uv2 -u4va -uve2 -uven3 -uv5eri -uver1 -u5v1in -ux2o -uy4a2 -uy5er1 -4va. -2v3ab -5vac -va1c2a -va5ceo -vacu1 -v4ad -3vag3a -va4ge -4va2g2e4d -vager4 -va2g5r2 -v1al. -1v2ale -vali2 -va5lie2 -val4ise -val1is -5valu -5val4v -vam4i -v2a2m -va5mo -5van1n4 -va2n -van2t2a4 -4van2tl -var4is -va2r -var1i -4vas2e -vas5el5 -v5a4so -v2ast3a -vast2 -v4at. -5vatee -v4at4in1a -va2t3in -4v2a2tu -2ve. -ve2ct4 -ve3g -3vei -2vel -vel3at -vel2a -4v2ele -v3e2l1er1 -ve5l4ine -v1ell2 -v4el1l2a -vel5ler1 -vel3li -vel5op1i -ve4n4al -ven1a -ven4do -v2e4nd -v4e1ne -ve5n2i1a2 -ven1i -ven2t5o -ven4tr2 -4v4en2u -v5en5ue -5ve3o -5ver1b -ver1 -verde5v -v2erd -4v4er2e4 -ver5ea4 -ver3ei4 -v5er3ie2 -ver3m4 -ver4ne -ver2n2 -5ver2se -ve2r1s2 -4v4es. -4vi. -5vi3al1it -v2i1a2 -vi4a2tr2 -vi3at -vi1b4 -vic2 -vi4ca -vi5car1i -v2i1ca2r -vice3r1 -v4ice -5vi2ct2 -5v2icu -5vider1 -vi2d -vi2gn3 -v2ig -vi4l -vil3i -3vil2i4a2 -v5ilise -vil1is -v5ilize -vil1iz -vil5lin -vill2 -vil1li -vim4 -5vim2e -2v1in -vin4a2c -vin1a -3vin1c2i2 -v2inc -vin2e -5vinit -v2in1i -v5in1iz -vint4 -vin5ta -3v2i1o2 -v4i1ol3 -vi5om -5v2i3p -vir2e4 -v2ir -vi5r2i4d -vir1i -vir3u -5visecti -v4i1sec -vise2ct -5vi1s2io4 -v3i2s1m4 -2v5ist2 -vi2t -vit2a -vi3tal -vi5te2l -v5it1ie2 -v2it1r2 -vi3tu -v3ity -viv5al -v2iva -viv5or -vi2vo -v2i5zo -v1iz -1vo -2vo. -vo2l -vo5li1t2io2 -vol2it -vol4u1b2i -v4ol1ub -volv4 -4von -vo5rac -vor1a -3vor1c2 -4v2or2e -3voro -vo3ta2r -v2ot -2vow -vr4 -v5ra4 -v5ri -v5ro -vr2ot4 -4vs -v3ur2e -vu4r -2vv2 -v5ver1 -v5vi -4vy -4w1ab -wag3o -wais4 -w2ai2 -w3al. -wal2l5er1 -wall2 -w3a4ls -wan5gli -wa2n -wa4ng -wan2gl2 -wank5er1 -wa4n2k -war5d2ed -wa2r -ward5er1 -ward5r2 -war4f -war4te -wart2 -war5th2i -war2th -wa4ss4 -was4t2 -wa1te -wav4in2e -w2av -wa2v1in -w1b4 -w4bon -wbo2 -w5c -w5die2 -w3dr2 -we4b -w4ed -3we2ed -5wei -weight5i -we2ig2 -wei2gh -weigh2t -we2ir4 -wel3i -wel1iz4 -wel4iz3i2 -wel4li -well2 -went4 -wes4 -west3 -w5e4st. -w5f -wh2 -w5hi2d -wi2 -wid4e -wi2d -wi5er1 -wie2 -wil2l5in -will2 -wil1li -wim2p -win2e -wing5er1 -w4inge -wi4ng -win4tr2 -3w4ise -with5eri -w2ith -wit4her1 -withe2 -w3l2a -w2l1er1 -wl1i -wl4ie2 -w1m -1wo -wol4 -wol5ver1 -3w2om -w2on2t -word5i -wot2ch4 -w2ot -wo2tc2 -w2oun4 -wo4u2 -wp5in -wra4 -ws5i4ng -w2sin -w5ster1 -wst2 -wste2 -wt4 -w5te -w3to -wy2 -wz4 -x1a -x4a2ch -x4ade -x2ag -x3a2g1g -xa5me2t -x2a2m -xam2e -x3am1i -xa4n5d -xa2n -xan1o4 -x2as -xas5p -x3c4 -xc2av3 -xcor5 -x1c2o2 -xe4 -x1ec -xec3r2 -xe5cu3t2io2 -xecut2i -xecut5o -x2e2d -x5e2dl4 -x5e4d1n2 -x5eg -x1em -x3en -xen4op -xen1o -x3er1 -xer4g2 -xer3o -x1h -xhort4a -xh2ort2 -x1i -x3ia. -x2i1a2 -x4ias -xi4c -x5i1ge -x2ig -xim3a -x4im2e -xi1me2t4 -x3io2 -x2i4p -x4it. 
-x4i4t1s -x1o -x4ode -x5om -xo4mat -xo1ma -xo4n -x4os -xotr2op4 -x2ot -x4o1tr2 -x3p -xpel4 -xp4o5n2 -x1po -xp2oun4 -xpo4u2 -x1s2 -x1t2 -x4t4ed -xtens5o -xte4n2s -x1ter3i -xter1 -xter4m3 -xter2n3 -x4th -xti4 -xtr2a5d -xtr2 -xtr2a3v -xtr2e4 -xu4o -x1u4r -xur4b -x5us -x5w -xx4 -x4y2s4 -xy3t -y1a2 -y5ac -1y2a2r -3yard -yas4i -4y1b -yb2i -yc2a5m -y5chede2 -y2ch -y4ch2ed -yche2 -ych5is -y3cho -y4chose -yc1l2 -ycl2a2m4 -ycl2a -y4coli -y1c2o2 -y4coll2 -yc2om4 -y2cos -y1d4 -yda4 -yder4 -ydro5s -ydr2 -y4dro4u2 -y3ee -yel5o -y3en -y1er1 -y3e4st. -yest2 -yes5te2 -y5e2t2t4 -y5f -y1g -y1g2i2 -yg2i5a2 -y3gl2 -yg2o4i -y1h -y1i -y3in -yle2 -ylin5de2 -yl2i4n4d -yllab5i -yll2 -yl1l2a -yll1ab -yl3os -yl5o4u2 -y1m2e4 -y3men -y5me2t -y5m2i1a2 -ym5in -ym2ot4 -y1mo -ym4pha2 -ym1p -yn1 -yn5ago4 -yn1a -yna4nd5 -yna2n -yn5ap4 -yn5ast2 -yn2as -yn4c2i2 -y4nd4 -yn2e -yn3er1 -y4ng4 -yn4gol -yn1go -yni4c -yn1i -y2n4y2 -y1o2 -yo3d -yo4g4i4s -y2o1g -yo3g2i -y2oun4 -yo4u2 -you4ng5 -2yp -yp5al -yper3 -y5p2er2e -y4p4eri -y4pero -y4pet -y2ph -yph4e4 -yph3i -y4p1i -y2p1n -y1po1 -y4po4x -y2pr2 -yp5ri -yp4si -y2ps2 -yp5sy5f -yps2y -ypt3a -y2pt2 -y5pu -y3rag -yr3at -yr3ic -y5r2ig -yr3is -yr3i4t -yr5o1lo -y4r4r4 -y2r4s2 -yr5u -4y2s -ys5a4g -ys5a1t -y3s2c2 -y3s2h -ys1ic -y2s3in -y1s2i4o4 -yso5 -ys4so -y4ss -ys1t2 -ys4to -y3u -yv4 -y3w -yz5er1 -yzy4 -z1a1 -2za. -za4bi -z1ab -z2a2i2 -z4as -za4te -zd4 -ze1b4 -z2e4d -zen4a -z5e4ng -zer5a -zer1 -z3et4 -z1i -zib5 -5zic4 -z2ie2 -zi5m -zin4c3i2 -z2inc -z3i4ng -z4in2g5i -z4is -3zlem -z3ler1 -z3li -4zo. -5z2oa2 -zo3a2n -3z2oo2 -zo3ol -zo3on -zo5o2p -zo5oti -zo2o2t -zo5p -z2ot2 -z5s -5zum -4zy. -zz2 -z3za2r -zz1a1 -z5z4as -z3z2ie2 -zz1i -zzo3 -z5z2ot2 diff --git a/dist-packages/wordaxe/wordaxe/dict/hyph_ru.dic b/dist-packages/wordaxe/wordaxe/dict/hyph_ru.dic deleted file mode 100755 index 9d36dec61..000000000 --- a/dist-packages/wordaxe/wordaxe/dict/hyph_ru.dic +++ /dev/null @@ -1,3875 +0,0 @@ -ÁÂ×ÇÄÅÖÚÉÊËÌÍÎÏÐÒÓÔÕÆÈÃÞÛÝßÙØÜÀÑ -.Á×É2 -.ÁÄ1Ò -.ÁÄÉ2 -.ÁÉ2 -.ÁË1× -.ÁË1Ò -.ÁÌØ5 -.ÁÓ1Ð -.ÁÕ2 -.ÁÛ1È -.ÁÜ2 -.ÂÅ2Ú1Á2 -.ÂÅ2Ú1Õ2 -.ÂÅ2Ú3Ï2 -.ÂÅ2Ó1Ô -.ÂÅÚ1ÎÁ -.ÂÅÚ1Ò -.ÂÉ2Â1Ì -.ÂÕ1Ç -.×Úß2 -.×Ï1×2 -.×Ï2Ð1Ì -.×Ï2Ó3ÔÏÒ -.×Ï2ÓË -.×Ï3Ð2ÌÏ -.×ÏÚ1ÎÁ -.×Ó6Ð -.×ß2 -.×ÙÐ2ÌÅ -.×ÙÓ2Ð -.ÇÏÓ1Ë -.Ä×Ï2Å -.ÄÅ2ÚÉ -.ÄÉ2Á -.ÄÉ2ÓÔÏ -.ÄÏ1ÓÍ -.ÚÁ3×2ÒÁ -.ÚÁ3Ð2Î -.ÚÁÓ2 -.ÚÁÕ2 -.ÚÁÛ2 -.Ú×ÕËÏ3 -.ÚÏ2Ï3 -.ÉÇ1Ì -.ÉÇ1Ò -.ÉÅ2 -.ÉÚ1Î -.ÉÚ1Ò -.ÉÚÏ2ÂÌ -.ÉÉ2 -.ÉÏ2 -.ÉÓ1ÔÉ -.ÉÓ1ÔÏ -.ÉÓ5ÔÒ -.ÉÕ2 -.ÉÀ2 -.ËÏÎ2ÔÒÎ -.ÌÅ1Í -.ÌØ2 -.ÍÅ2Ö3 -.ÍÅ3Ö4ÁÍ -.ÍÅ3Ö4ÁÈ -.ÍÅ3Ö4Å -.ÍÅ6ÖÉ2Ï -.ÍÏ2Ç1Ì -.ÎÁ1Þ2Î -.ÎÁ1Û2ËÏ -.ÎÁ2É -.ÎÁ5×6 -.ÎÅ1× -.ÎÅ1Ú2 -.ÎÅ1È -.ÎÅ3Ï2ÔÒ -.ÎÅ5Ì -.ÎÅÁ2 -.ÎÅÂÅ2Ú1Ï2 -.ÎÅÍ2ÎÏ -.ÎÉ1Ó2Ë -.ÎÏÓ5Ë -.ÏÁ2 -.ÏÂ1ÌÁ -.ÏÂ1ÌÅ -.ÏÂ1ÌÏ -.ÏÂ1ÌÕ -.ÏÂ1ÒÅ -.ÏÂ1ÒÕ -.ÏÂ3Ï2ÓÔ -.ÏÂ5ÌÉ× -.ÏÂ5ÌÉÔ -.ÏÂÅ2Ú1Ï2 -.ÏÂÅ2Ó1Ô -.ÏÂÏ1ÌØ -.ÏÇ5Î -.ÏÚ2 -.ÏÓ1ÐÉÎ -.ÏÓ2ÐÁÒ -.ÏÔ1Ò -.ÏÔ1ÈÌ -.ÏÔ3× -.ÏÔÏ1Í2 -.ÐÏ1×2 -.ÐÏ1Ö2 -.ÐÏ2ÄÙÇ -.ÐÏ2ÄÙÍ -.ÐÏ2ÄÙÎ -.ÐÏ2ÄÙÓ1 -.ÐÏ2ÄÙÔ -.ÐÏ2ÄÙÝ -.ÐÏ2ÓÔ1ÉÎ -.ÐÏ3ÄÙÍÉ -.ÐÏÄ1×Ï -.ÐÒÅ1Ì -.ÐÒÅ2Ö1Ä -.ÐÒÉ1Ç2Î -.ÐÒÉ1Í2Î -.ÐÒÉ3Ë2Î -.ÐÒÉÓ2Ë -.ÐÒÏ1ÓÎÕ -.ÐÒÏ3ÓÌ -.ÐÒÏÓ2 -.ÒÁ2ÚÏ -.ÒÁ2Ó1ÔÁ -.ÒÁ2Ó1ÔÅ -.ÒÁ2Ó1ÔÅË -.ÒÁ2Ó1ÔÅÞ -.ÒÁ2Ó1ÔÉ -.ÒÅÇÉ6Ï -.ÒÏ2È1 -.ÓÅË1Ó2Ô -.ÓÅÐ5Ô -.ÓÏÐ1Ì -.ÔÅË1Ó -.ÔÏÐ1Ì -.ÔÒÁÎ2Ó1 -.ÔÒÅÈ1 -.ÔØ2 -.ÕÇ1ÌÅ -.ÕÇ1ÌÏ -.ÕÄ2Ì -.ÕÄ2Ò -.ÕÅ2 -.ÕË2 -.ÕÒ6× -.ÕÀ2 -.ÆÉ2ÚÏ -.ÈÉÍ1Þ -.ÈÌÁ2 -.ÞÁ2Å -.ÞÅ2ÓÔ1× -.ÞÅÒ2ÓÔ1 -.ÞÅÔÙÒÅÈ1 -.ÜÏ2 -.ÜÑ2 -.ÀÁ2 -.ÑÉ2 -1ÁÄÍ -1ÁÐÐ -1ÁÔÁË -1Â2ÌÁÇÏ -1Â× -1ÂÅ -1ÂÌ -1ÂÒÉ -1ÂÕ -1ÂÀ -1ÂÑ -1×2ÎÕË -1×2ÎÕÞ -1×2ÓÐ -1×2ÓÈ -1×2ÓÀ -1×2ÛÉ× -1×ÁÇ -1×ÁË -1×ÅÇ -1×ÅÌÌ -1×ÅÒ. -1×Ú2 -1×ÉÈ -1×ÉÞ -1×Ì -1×ÏË -1×ÏÑ -1×Ð -1×Ò2 -1×ÕÄ -1×Ù -1×À -1ÇÁ -1ÇÏÒ -1ÇÒ -1Ä2×ÏÒØ -1Ä2ÌÅ× -1Ä2ÎÅ×Ë -1Ä2ÎÅ×ÏË -1Ä2ÒÁÖ -1Ä2ÒÁÚÎ -1Ä×ÉÖ -1Ä×ÉÚ -1ÄÖ -1ÄÚÅ. 
-1ÄÎÅ×Î -1ÄÎÑÛ -1ÄÏ× -1ÄÏÔ -1ÄÏÞ -1ÄÒÅÓÓ -1ÄÒÏ2Ç1Î -1ÄÒÏ -1ÄÒÏÍ -1ÄÕÎ -1ÄØÅ -1ÄØÑ -1ÖÇ -1ÖÄ -1ÖÍÕ -1Ú×Õ -1ÚÏÌ -1ÚÒÉ -1ÚÕ -1ËÁ× -1ËÁÅ -1ËÁÐ -1ËÁÔ -1ËÁÀ -1ËÉ× -1ËÌ -1ËÏ×Ù -1ËÏÍÐ -1ËÏÎ -1ËÏÏ -1ËÏÓ -1ËÏÛ -1ËÒ -1ÌÁ2ÐØ -1ÌÁÎÄ -1ÌÅÄÅ -1ÌÉ2Ð1Ô -1ÌØÏ -1ÌØÀ -1ÌÀÔ. -1Í2ÎÏÖ -1ÍÁÇ -1ÍÅÄ -1ÍÅÊ -1ÍÅÎ. -1ÍËÎ -1ÍÏÎ -1ÍÝÅ -1ÍÙ. -1ÎÁ. -1ÎÁ1Ç -1ÎÁ1Ó2 -1ÎÁÄ -1ÎÉà -1ÎÏ. -1ÎÏÊ -1ÎÏÍ -1ÎÏÈ -1ÎÏÀ. -1ÎÒÁ× -1ÎØÏ -1ÎØÀ -1ÎÑ -1Ï2Â1ÌÁÞ -1Ï2ÂÉÖ -1Ï2ÂÏÚ -1ÏÂÅÓ -1ÏÂß -1ÏËÔ -1ÏÔÄ -1ÏÔÐ -1Ð2ÌÅÎË -1Ð2ÌÅÎÏË -1Ð2ÌÅÎÏÞ -1Ð2ÌÅÔ -1Ð2ÓÁÌÍ -1ÐÅ. -1ÐÅÎÚ -1ÐÅÞ -1ÐÉÓ -1ÐÌÁ× -1ÐÌÁÚ -1ÐÌÅ2Ó1Ë -1ÐÌÉË -1ÐÌÏÓ1Ë -1ÐÌÙ -1ÐÏ -1ÐÒ -1ÐÔÉÈ -1ÐÕ. -1ÐÑ -1Ò2×É. -1Ò2×ÉÔÅ. -1ÒÁ -1ÒÁÌÇ -1ÒÅÇÉ -1ÒÅÚÁ -1ÒÅËÌÁ -1ÒÉÓÏ -1ÒÏÓÛ -1ÒÙ -1Ó2ËÁÆ -1Ó2ËÌÏÎ -1Ó2ËÒÅ1ÓÔ -1Ó2ËÒÅ -1Ó2ÐÁÌØ -1Ó2ÐÏÓÁ -1Ó4Ô×ÏÒÞ -1ÓÁ -1Ó× -1ÓÅ -1ÓÖ -1ÓÉ -1ÓËÏÐ -1ÓÌ -1ÓÏ -1ÓÐ2ÌÀ. -1ÓÒ -1ÓÔÏ -1ÓÔÒÏ× -1ÓÕ -1ÓÆÅ -1ÓÈÅ -1ÓÞÁÓ -1ÓÞÉÔ -1Óß2 -1ÓÙ -1ÓÑ -1Ô2ËÁÎ -1Ô2ÒÅ2Ú1× -1Ô2ÒÑÓ -1Ô2ÒÑÈ -1Ô4×ÅÒÄ -1Ô4×ÏÒ -1ÔÁËÔ -1ÔÅËÁ -1ÔÅËÛ -1ÔÅÒÌ -1ÔÅÞÅ -1ÔËÎ -1ÔÌÅ -1ÔÏÌË -1ÔÏÒÓ -1ÔÏÒà -1ÔÏÞÎ -1ÔÏÝ -1ÔÒÅ2Ó1Ë -1ÔÒÅ -1ÔÒÉ -1ÔÒÕ -1ÔÑÎÕ -1ÕÚÌ -1Æ2ÔÏÒ -1ÆÁ -1ÆÉ -1ÆÌ -1ÆÏ -1ÆÒ6 -1ÆÔÏÎÇ -1ÆÙ -1È2ÌÙÎ -1È× -1ÈÉ -1ÈÌÅ -1ÈÌÏÒ -1ÈÒ -1ÈÕ. -1ÃÁÍ -1ÃÁÈ. -1Ã× -1ÃÅ -1ÃÉ -1ÃÏ -1ÃÕ. -1ÃÙ -1ÞÅÌ -1ÞÉ× -1ÞÉË -1ÞÌÁ -1ÞÌÅ -1ÞÏ -1ÞÔ -1ÞÈ -1Û2× -1Û2ËÏÌ -1Û2ÍÙ2Ç1Î -1ÛÉ2Â1Ì -1ÛÐÅ -1ÛÐÉÌ -1ÛÔÅ -1ÛÔÕ -1ÛÀ -1ÝÉ -1Ü2Ë -2Á1ÍÁ -2Á3Ï -2Â1Ä -2Â1ÌÅÎ -2Â1ÌÑ -2Â1Î -2Â1Ô -2Â1à -2Â1Û -2Â5Ë -2ÂÌÁÓ -2ÂÌØ -2ÂÒ. -2ÂÒØ -2×1ÌÁ -2×1ÌÅÎ -2×1ÌÉ -2×1ÌÀ -2×1ÌÑÅ -2×1ÌÑÌ -2×1ÌÑÀ -2×1ÍÉ -2×1ÒÅ. -2×1ÒÏ -2×1ÒÙ. -2×1ÔÅÒÐ -2×ÌØ -2×Ò. -2Ç1 -2Ç1Ë -2Ç1Í -2Ç1Ð -2Ç1Ó -2Ç1Û -2Ç5Ô -2ÇÒÏÐ -2Ä1ÉÎÓÔÉ -2Ä1Ë -2Ä1Í -2Ä1ÒÏ. -2Ä1Ó -2Ä1Æ -2Ä3Û2 -2ÄÖÓ -2ÄÎÙ -2ÄÏÂÌÁÞ -2ÄÏËÔ -2ÄÒÓ -2ÄØ3ÔÅ. -2Å1ËÏ -2Å1Ï -2ÅÎÒ -2Ö1Ë -2Ö1à -2ÖÁ×Å -2ÖÁ×Ì -2ÖÄÌ -2ÖÄØ -2Ú1 -2Ú1ÄÁ -2Ú1ÉÎÔ -2Ú1ÉÎÆ -2Ú1Ë -2Ú1Ó -2ÚÄÎ -2ÚÎÁ. -2ÚÎÙ -2É1×Ù -2ÉÍÅÎÅ -2Ë1 -2Ë1Ç -2Ë1Ë -2Ë1ÌÁ. -2Ë1ÌÁË -2Ë1ÌÉ. -2Ë1ÌÏ. -2Ë1Í -2Ë1Ô -2Ë1à -2Ë1Û -2ËÁÚË -2ËÌ. -2ËÌØ -2ËÎ -2ËÓ -2Ì1Î -2Ì1ÏÒÇ -2Í1×2 -2Í1ÉÚÄ -2Í1Ì -2Í1Û -2Î1Ó -2Î1à -2Î1Û -2ÎÂÅ -2ÎÅ×Î -2ÎÏÔÄ -2ÎÑÛ -2Ï1Á2 -2Ï1Ç -2Ï1ÚÁ -2Ï1É -2Ï1ÒÙ -2Ï1ÓÏ -2Ï1ÔÅ -2Ï1ÔÒ -2Ï1Õ2 -2Ï1ÈÉ -2Ï1Ü -2Ï5ÈÒÏ -2Ï× -2ÏÌ -2ÏÍ -2ÏÐÉÒ -2ÏÓÔÁÌ -2ÏÓÆ -2ÏÆ -2Ð1Ë -2Ð1ÌÀ. -2Ð1ÌÀÓØ. -2Ð1Í -2Ð1Î -2Ð1Ð -2Ð1ÓÉÓ -2Ð1ÓÔ -2Ð1ÔÏÍ -2Ð1Æ -2Ð1à -2Ð1Þ -2Ð1Û -2Ð3ÔÕ -2ÐÓ. -2ÐÓÅ -2ÐÓÏ -2ÐÓÕ -2ÐÓÙ -2Ò1ÏÒÇ -2Ò1ÕËÓ -2ÒÉÓÐ -2Ó1ÌÉÒÕ -2Ó1ÌÏË -2Ó1ÌÏà -2Ó1ÍÕ -2ÓÂÕ -2ÓË. -2ÓËÎ -2ÓÌØ -2ÓÍÅÎ. -2ÓÎÙ -2ÓÓË -2ÓÔ×. -2ÓÔÅÒÌ -2ÓÔË -2ÓÔÎ -2ÓÔØ. -2ÓÆÏÒ -2ÓÜ2 -2ÓÑÚ -2Ô1×ÅÊ -2Ô1Ç -2Ô1ÉÎÆ -2Ô1Í -2Ô1Î -2Ô1Ð -2Ô1Ó -2Ô1Æ -2Ô1à -2Ô1Ý -2Ô1Ü -2ÔÁÍÐ -2ÔÏÍÝ -2ÔÏÎÇ -2ÔÒ. -2ÔÒÁÂÏ -2ÔÒ -2ÔÒÇ -2ÔÒÄ -2ÔÒÍ -2ÔÒÐ -2ÔÒÒ -2ÔÒÆ -2ÔÕÞ -2ÔØ. -2Æ1× -2Æ1ÌÅÎ -2Æ1Î -2Æ1ÏÒÇ -2Æ1Ó -2È1×Å -2È1Ç -2È1Ó -2È1Õ2Ç -2Ã1Ç -2Ã1Ú -2Ã1Ë -2Ã1Ì -2Ã1Í -2Ã1Ï2Ä -2Ã1ÏÔ -2Ã1Ð -2Ã1Ó -2Ã1Ô -2Þ1Í -2ÞÔÍ -2Û1Æ -2Ý1Î -2ÀÍ -2ÀÀ. -2ÀÑ. -2ÑÀ. -2ÑÑ. -3×2ÌÉÑ -3Å×ÒÏÐ -3Ú2×ÕÞ -3ÚÉÓ -3É2ÍÅÎÏ -3É2ÍÅÎÕ -3Ë6ÎÉÖ -3ÎÁÑ -3ÎÉË -3ÎÕÀ -3ÎÙ -3Ï2Ô1ÒÑÄ -3Ð2ÓÉÈ -3ÐÌÁÎ -3Ó2ÌÁ× -3Ó2ÌÏ× -3Ó2ÌÕÖ -3Ó2ÐÏÓÏ -3ÈÏÒ -3Þ2ÍÏË -3ÞÉÊ -5ÂÉ2Ï -5ÂÏÔ -5ÂÏà -5×ÁÑ -5×ÕÁ -5Ä×ÉÎÕ -5ÄÅ -5ÄÏ. -5ÄØÀ -5ÖÅ× -5ÚÏ. -5ÚØÀ -5ÉÎÖ -5ÉÎÓÐ -5Ë6ÔÏ. -5ËÏÁ -5ËÏÒÙ -5Ì6ÖÅÛ -5ÌÉÇ -5ÌÉÃÏ -5ÌÉÞÉ -5ÍÉÊ -5ÍÉÎÇ -5ÍÏÔÉ -5ÎÁÐ -5ÎÁà -5ÎÉ -5ÏÔËÒ -5ÐÏÓÙ -5ÐÒÏÅ6 -5Ó2ÎÁ -5ÓËÏÑ -5ÓÍÅÓ -5ÓÍÙ -5ÓÔÙ -5ÔÅÞØ -5ÔÉÇÅ -5ÔÉÚ. -5ÔÕÄÙ -5ÔÕÛÅ× -5ÈÏÚ -5ÈÏÍ -5ÈÏÕ -5ÃÁ. -5ÞÁÎ -5ÛÌÏ -5ÛÔÒ -6Â1 -6Â1Ç -6Â1Í -6Â1Ó2 -6Â1Ý -6ÂÌ. -6ÂØ. -6×5ÒÁà -6×Î. -6×ÓË -6ר. -6Ç5ÌÁÊ -6ÇÌ. -6ÇÎ. -6ÇÒ. -6ÇÒÅË -6Ä1 -6Ä5ÒÏÚ -6ÄÖ. -6ÄÚ. -6ÄÎ. -6ÄÏÔÄ -6ÄÒ. -6ÄÔ. -6ÄØ. -6Å×ÏÌ -6Å×ÙÄ -6ÖÄ. -6Ú1Ö -6Ú1Í -6Ú1ÒÕ -6ÚÎÁÑ -6ÚØ. -6ÉÐ -6Ê1 -6Ë1Æ -6Ë×. -6ËÅÁ -6ËÒ. -6Ì1Ó2 -6ÌØ. -6ÌØÛ -6Í1 -6Í1Í -6Í1Ð -6Í1Ô -6Í1ÞÅ -6ÍÓ. -6ÍØ. -6ÎÒ. -6ÎØ. -6Ï5ÂÁ -6ÏÄÁÒ -6ÏËÌ -6ÏËÏÌ -6ÏÈÏÒ -6ÐÌ. -6ÐÌØ. -6ÐÒ. -6ÐÒØ. -6ÐÔ. -6ÐØ. -6Ó1Ú -6Ó5ÞÕ -6ÓËÏÎ -6ÓÌ. 
-6ÓÌÑÍ -6ÓÒ. -6ÓÓ. -6ÓÓÔ -6ÓÔ. -6ÓÔÒ. -6ÓÔÕÄÙ -6ÓÔØÄ -6ÓÔØÓ -6ÓØ. -6Ô1 -6Ô1ÒÅÄ -6Ô1Ô -6Ôר. -6ÔÉÎÖ -6ÔÌ. -6ÔÏÍÓ -6ÔÒ× -6ÔÒÕÐ -6ÔÞ. -6ÔÛ. -6ÆÒ. -6ÆØ. -6È1Þ -6È5×É -6È×. -6ÈÒØ. -6ÈÕÅÍ. -6ÈÕÊ. -6ÈÕÀ. -6ÈÕÑ. -6ÃÅÎÎÉ -6Þ1Ë -6Þ5ÌÅÇ -6Þ5ÌÅÖ -6ÞÔ. -6ÞØ. -6Û1 -6Û×. -6ÛÌ. -6ÛØ. -6ÝØ. -6ß1 -Á1Á -Á1ÂÁ -Á1ÂÅ -Á1ÂÉ -Á1ÂÏ -Á1ÂÒ -Á1ÂÕ -Á1ÂÈ -Á1ÂÙ -Á1ÂØÅ -Á1ÂØÉ -Á1ÂØÀ -Á1ÂØÑ -Á1ÂÑ -Á1×Á -Á1×Ï -Á1×Õ -Á1×Ù -Á1רŠ-Á1×ØÉ -Á1רÀ -Á1×ØÑ -Á1×Ü -Á1×À -Á1×Ñ -Á1ÇÁ -Á1ÇÅ -Á1ÇÉ -Á1ÇÌ -Á1ÇÏ -Á1ÄÁ -Á1Ä×ÏÒ -Á1ÄÅ -Á1ÄÉ -Á1ÄÏ -Á1ÄÒÁ -Á1ÄÕ -Á1ÄÃÁÔ -Á1ÄÙ -Á1ÄØÉ -Á1ÄØÀ -Á1ÄÀ -Á1ÄÑ -Á1Å -Á1ÖÁ -Á1ÖÅ -Á1ÖÖ -Á1ÖÉ -Á1ÖÍ -Á1ÖÏ -Á1ÖÕ -Á1ÖØÅ -Á1ÖØÉ -Á1ÖØÀ -Á1ÖØÑ -Á1ÚÁ -Á1ÚÅ -Á1ÚÉ -Á1ÚÏ -Á1ÚÙ -Á1ÚØÅ -Á1ÚØÀ -Á1ÚØÑ -Á1ÚÀ -Á1ÚÑ -Á1É -Á1ËÁ -Á1ËÅ -Á1ËÉ -Á1ËÏ -Á1ËÕ -Á1ËÙ -Á1ÌÁ -Á1ÌÅ -Á1ÌÏ -Á1ÌÕ -Á1ÌÙ -Á1ÌØÅ -Á1ÌØÉ -Á1ÌØÑ -Á1ÌÀ -Á1ÌÑ -Á1ÍÅ -Á1ÍÉ -Á1ÍÏ -Á1ÍÕ -Á1ÍÙ -Á1ÍØÅ -Á1ÍØÉ -Á1ÍØÀ -Á1ÍØÑ -Á1ÍÑ -Á1ÎÁ -Á1ÎÅ -Á1ÎÉ -Á1ÎÏ -Á1ÎÕ -Á1ÎÙ -Á1ÎØÅ -Á1ÎØÉ -Á1ÎØÀ -Á1ÎØÑ -Á1ÎÀ -Á1ÎÑ -Á1Ð -Á1Ò6Á -Á1ÒÅ -Á1ÒÉ -Á1ÒÏ -Á1ÒÕ -Á1ÒÙ -Á1ÒØÅ -Á1ÒØÉ -Á1ÒØÀ -Á1ÒØÑ -Á1ÒÀ -Á1ÒÑ -Á1Ó2ÛÅ -Á1ÓÁ -Á1ÓÅ -Á1ÓÉ -Á1ÓÏ -Á1ÓÔÁ -Á1ÓÔÅ -Á1ÓÔÉ -Á1ÓÔÕ -Á1ÓÔÙ -Á1ÓÔØÅ -Á1ÓÔØÀ -Á1ÓÔØÑ -Á1ÓÔÀ -Á1ÓÔÑ -Á1ÓÕ -Á1ÓÙ -Á1ÓØÅ -Á1ÓØÉ -Á1ÓØÀ -Á1ÓØÑ -Á1ÓÀ -Á1ÔÁ -Á1ÔÅ -Á1ÔÉ -Á1ÔÏ -Á1ÔÒ -Á1ÔÕ -Á1ÔÙ -Á1ÔØÅ -Á1ÔØÉ -Á1ÔØÀ -Á1ÔØÑ -Á1ÔÀ -Á1ÔÑ -Á1Õ -Á1ÆÁ -Á1ÆÅ -Á1ÆÉ -Á1ÆÏ -Á1ÆÕ -Á1ÆØÑ -Á1ÆÑ -Á1ÈÁ -Á1ÈÅ -Á1ÈÉ -Á1ÈÏ -Á1ÈÕ -Á1ÃÁ -Á1ÃÅ -Á1ÃÉ -Á1ÃÏ -Á1ÃÕ -Á1Þ2ÎÅ -Á1ÞÁ -Á1ÞÅ -Á1ÞÉ -Á1ÞÕ -Á1ÞØÅ -Á1ÞØÉ -Á1ÞØÀ -Á1ÞØÑ -Á1ÛÁ -Á1ÛÅ -Á1ÛÉ -Á1ÛÏ -Á1ÛÕ -Á1ÛØÅ -Á1ÛØÉ -Á1ÛØÀ -Á1ÛØÑ -Á1Ý -Á1Ü1 -Á1À -Á1Ñ -Á2×1ÌÑ -Á2×1Ð -Á2×1ÒÁ -Á2×ÏÔ -Á2ÄÙÎ -Á2ÚÒÉ -Á2Î1Ï -Á2Î5ÕÚ -Á2Ð1Ó -Á2Ð1Ô -Á2Ó1ÔÉÒ -Á2ÓËÏÐ -Á2ÕÌÅ -Á2ÕÍ -Á2ÕÎ -Á2ÕÓ -Á2ÕÜ -Á2Û1ÌÙ -Á2ÜÒ -Á3ÇÕ -Á3É2Ç1Ò -Á5×Å -Á5×É -Á5Ä×É -Á5Ú6Õ -Á5ÌÉ -Á5ÓÔÒÙ -Á6ÎÉÎÓ -Á6ÔÏÍÎ -ÁÁ2Ð1 -ÁÇ1×Á -ÁÇ1Ä -ÁÇÁ5Ó6 -ÁÄ1Á2ÇÅÎ -ÁÄ1ÒÕÇÁ -ÁÄ5ÒÅÚ -ÁÄÉ2Ï -ÁÄØ2 -ÁÅ2ÄÉ -ÁÚ1×Å -ÁÚ1×É -ÁÚ1×Ï -ÁÚ1Ò -ÁÉ6Ú5 -ÁÊÍ2Á -ÁË1Î -ÁË1Ó -ÁËÏÐ5Ì -ÁÌØ1Ä -ÁÍ1ÎÏ -ÁÍ1Þ -ÁÎ1Ò -ÁÎ2ËÒÏ -ÁÎ2ÓËÒ -ÁÎ2ÓÐ -ÁÎ2ÓÕÒ -ÁÎ2Óà -ÁÎÁ2Ó3Î -ÁÎÓ1Õ -ÁÏ2ÓÔ -ÁÏ6Ë -ÁÐ1ÒÅÌ -ÁÒ2Ô1ÏÒ -ÁÒÔ2Ò -ÁÓ1Ë -ÁÓ1ÐÕ -ÁÓ1È -ÁÓ1Þ -ÁÓ3ÐÏ -ÁÓ5ÌÅÔ -ÁÓ5ÌÑÍ -ÁÓ5ÌÑÈ -ÁÓ5ÍÉ -ÁÓÓ2ÍÅ -ÁÓÓ6Ð -ÁÓÔ1×Õ -ÁÓÔÁ2Ð1 -ÁÔ1Ì -ÁÔ5×É -ÁÔÈ1Ì -ÁÕ2ÞÉ -ÁÕÔ1Ò -ÁÕÜ1Ò -ÁÆ1ÒÉ -ÁÞ1Ô -ÁÛ1ÌÉ× -ÁÛ1ÔÁ -ÁÑ2Ú -Â1×Á -Â1×Ï -Â1×Ñ -Â1Ö -Â1Ú6 -Â1ÌÁ× -Â1ÌÅÇ -Â1ÌÏÖ -Â1ÌÏÍ -Â1ÒÁÓÔ -Â1ÒÙ× -Â1Æ -Â1È -Â1Þ -Â5ÌÉÚÁ -ÂÁÓ1Í -ÂÅ2Ä1Ò -ÂÅ2Ú1Õ4Ó -ÂÅ2ÚÙ -ÂÅ2Ó1Ë -ÂÅ2ÓÔÁ -ÂÅÇ1Ì -ÂÅÇ1Î -ÂÅÚ1Ä2 -ÂÅÚ5× -ÂÅÚÙ2Ú1× -ÂÅÓ1È -ÂÅÓ1Þ -ÂÅÓ3Ð -ÂÅÓÓ2 -ÂÉ5ÓÔÒ -ÂÉÏ5Ó -ÂÉÓ2Ë1× -ÂÌ1ÉÓÐ -ÂÌÅ2Ó1Ë -ÂÏ1Ä6Ò -ÂÏ1ÖÖ -ÂÏ1Ú -ÂÏ1Ò× -ÂÏ1Ó -ÂÏ2ÅÓ -ÂÏ2ÍÞ -ÂÏ2ÓÓ -ÂÏ3Í2ÌÅ -ÂÏÚ2Ì -ÂÒÁ6ÓÌ -ÂÓÔ6 -ÂÕÇ1Ì -×1 -×1× -×1Ç -×1Ä -×1Ë -×1ÌÁÇ -×1ÍÁ -×1ÍÏ -×1Î -×1Ó2 -×1Ô -×1Æ -×1È -×1à -×1Þ -×1Û -×1Ý -×2ÎÕÛ -×2ÈÏÖ -×2ÞÅÒ -×5ÒÁÓ -×6ÂÉ -×6ËÕÓ -×6ÓÅÇ -×6ÈÏÄ -×Á1Ä -×Á2ÄÌ -×Á2ÄÎ -×ÁÎ5Ó6 -×ÁÈ1 -×ÄÏ1Ó -×Å2Ä1Ò -×Å2Ó1Ë -×Å2ÓÔ1× -×ÅÚ1ÄÏ -×ÅÒÔ1ÌÑ -×ÅÔ3×2 -×ÅÔ4×3Ì -×Ú6Ò -×ÚÙÓ5 -×É2ÁÍ -×É2Â1Ò -×É2ÚÎ -×É5ÁÆ -×É5ÏÌ -×ÉÁ1Ó -×Ë1Î -×Ï1Ä×Ï -×Ï1Ð -×Ï2Ö3Ö -×Ï2Ó1ÐÅ -×Ï2Ó3ÔÏË -×Ï2Ó3ÔÏÞ -×Ï2ÓÔÒ -×Ï3Ú2ÄÁÎ -×ÏÚ1× -×ÏÉ2Ó -×ÏÓ1Ë -×ÐÏ6Ì -×ÒÁ2Ö5Ä -×ÒÏ5Ô -×ÔÏ3Ë2 -×Õ1Ú -×Õ1ÓÔ -×Õ5Ç -×ÞÅ6Ô5 -×Ù1ÓË -×Ù1ÓÐ -×Ù1Ô× -×Ù1È -×Ù1Û -×Ù5Ð -×ÙÐÕ2Ë1 -Ç1Ç -Ç1Ú -Ç1ÌÑÅ -Ç1ÌÑÔ -Ç1ÌÑÀ -Ç1Þ -Ç2ÎÉ× -Ç2ÎÏÍ -Ç2ÒÁ -ÇÁ1ÓÔ -ÇÁ2Õ -ÇÅ2ÏÄ -ÇÅ2ÏÐ -ÇÅ2ÏÓ -ÇÅ2Ïà -ÇÅ6Ï -ÇÉ1Ó -ÇÉ2Â1Ì -ÇÉ2Ä1Ò -ÇËÏ1× -ÇÏ1Ú -ÇÏ1Ð -ÇÏ2ÚÌ -ÇÏ2Ó1Á -ÇÏ2Ó -ÇÏÓ3Ó -ÇÒÉ×1Ë -ÇÒÏ2Í1Þ -ÇÓ2ÛÉ -Ä1×Á -Ä1×Å -Ä1×ÉÄ -Ä1×ÉÓ -Ä1×ÏÄ -Ä1Ç2 -Ä1Ä -Ä1ÚÁ -Ä1Ú× -Ä1ÚÏ -Ä1Ì -Ä1Î -Ä1Ð -Ä1ÒÁÓ -Ä1ÒÅÖ -Ä1ÒÕ -Ä1ÒÙ× -Ä1ÒÑÄ -Ä1Ô -Ä1È -Ä1Þ -Ä2×ÏÑ -Ä5ÚÅÍ -Ä5ÚÉ -ÄÁ4Ï -Ä×Õ1Û -Ä×Õ2È1 -ÄÄ2× -ÄÅ1ÓÔ -ÄÅ1È -ÄÅ2ÅÓ -ÄÅ2Ú1Á2 -ÄÅ2Ú1Ï2 -ÄÅ2Ï -ÄÅÓ2Ë -ÄÉ2ÁÄ -ÄÉ2ÁÍ -ÄÉ2×1Ì -ÄÉ2Ï5Ó -ÄÉ2Ï -ÄÉ2Ó1Å -ÄÉ5ÏÎ -ÄÉ5È -ÄÉÓ1ÔÒ -ÄÍÏ1Ó -ÄÎÏ5Ä -ÄÏ1ÂÒ -ÄÏ1Ä2 -ÄÏ1Ú -ÄÏ1Ð -ÄÏ1Ò× -ÄÏ1Ó2Ð -ÄÏ1ÓÎ -ÄÏ1Û2 -ÄÏ2ÒÕ -ÄÏ6ÂÌÁ -ÄÏÈ1Ì -ÄÒÏ2Ö3Ö 
-ÄÒÑ2Â1 -ÄÓ2Î -ÄÓ3ËÎ -ÄÕ1Ð -ÄÕ1ÓÔ -ÄÕ2Ï -ÄÕ2Ð3Ì -ÄÙÍ1Î -ÄÜ1Ç -Å1Á -Å1ÂÁ -Å1ÂÅ -Å1ÂÉ -Å1ÂÏ -Å1ÂÒ -Å1ÂÕ -Å1ÂÙ -Å1ÂØÅ -Å1ÂØÀ -Å1ÂØÑ -Å1ÂÀ -Å1ÂÑ -Å1×Á -Å1×Å -Å1×É -Å1×Ï -Å1×Õ -Å1×Ù -Å1רŠ-Å1רÀ -Å1×ØÑ -Å1×À -Å1×Ñ -Å1ÇÁ -Å1ÇÄ -Å1ÇÅ -Å1ÇÉ -Å1ÇÌÁÍ -Å1ÇÏ -Å1ÇÕ -Å1Ä2ÌÉÎ -Å1ÄÁ -Å1ÄÅ -Å1ÄÉ -Å1ÄÏ -Å1ÄÕ -Å1ÄÙ -Å1ÄØÀ -Å1ÄÀ -Å1ÄÑ -Å1Å -Å1ÖÁ -Å1ÖÅ -Å1ÖÏ -Å1ÖÕ -Å1ÖØÅ -Å1ÖØÉ -Å1ÖØÀ -Å1ÖØÑ -Å1ÚÁ -Å1ÚÅ -Å1ÚÉ -Å1ÚÏ -Å1ÚÕ -Å1ÚÙ -Å1ÚØÀ -Å1ÚØÑ -Å1ÚÀ -Å1ÚÑ -Å1É -Å1ËÁ -Å1Ë× -Å1ËÅ -Å1ËÉ -Å1ËÕ -Å1ÌÁ -Å1ÌÅ -Å1ÌÉ -Å1ÌÏ -Å1ÌÕ -Å1ÌÙ -Å1ÌØÅ -Å1ÌØÉ -Å1ÌØÑ -Å1ÌÀ -Å1ÌÑ -Å1ÍÁ -Å1ÍÅ -Å1ÍÏ -Å1ÍÕ -Å1ÍÙ -Å1ÍØÅ -Å1ÍØÉ -Å1ÍØÀ -Å1ÍØÑ -Å1ÍÑ -Å1ÎÁ -Å1ÎÅ -Å1ÎÉ -Å1ÎÏ -Å1ÎÕ -Å1ÎÙ -Å1ÎØÅ -Å1ÎØÉ -Å1ÎØÀ -Å1ÎØÑ -Å1ÎÜ -Å1ÎÀ -Å1ÎÑ -Å1Ï2ËÒ -Å1ÐÁ -Å1ÐÅ -Å1ÐÉ -Å1ÐÏ -Å1ÐÕ -Å1ÐÙ -Å1ÐØÅ -Å1ÐØÉ -Å1ÐØÀ -Å1ÐØÑ -Å1ÐÑ -Å1ÒÁ -Å1ÒÅ -Å1ÒÉ -Å1ÒÏ -Å1ÒÕ -Å1ÒÙ -Å1ÒØÅ -Å1ÒØÀ -Å1ÒØÑ -Å1ÒÀ -Å1ÒÑ -Å1Ó2Ç -Å1Ó2ËÌÁÄ -Å1Ó2ÐÏÔ -Å1ÓÁ -Å1Ó -Å1ÓÄ -Å1ÓÅ -Å1ÓÉ -Å1ÓË -Å1ÓÍ -Å1ÓÏ -Å1ÓÏË. -Å1ÓÔÁ -Å1ÓÔ× -Å1ÓÔÅ -Å1ÓÔÉ -Å1ÓÔÒ -Å1ÓÔÕ -Å1ÓÔÙ -Å1ÓÔØÅ -Å1ÓÔØÀ -Å1ÓÔØÑ -Å1ÓÔÀ -Å1ÓÔÑ -Å1ÓÕ -Å1ÓÙ -Å1ÓØÅ -Å1ÓØÉ -Å1ÓØÀ -Å1ÓØÑ -Å1ÔØÅ -Å1ÔØÉ -Å1ÔØÀ -Å1ÔØÑ -Å1ÔÀ -Å1Õ2 -Å1ÆÁ -Å1ÆÅ -Å1ÆÉ -Å1ÆÏ -Å1ÆÕ -Å1ÈÁ -Å1ÈÅ -Å1ÈÉ -Å1ÈÏ -Å1ÈÕ -Å1ÃÁ -Å1ÃÅ -Å1ÃÉ -Å1ÃÏ -Å1ÃÕ -Å1ÞÁ -Å1ÞÅ -Å1ÞÉ -Å1ÞÕ -Å1ÞØÅ -Å1ÞØÉ -Å1ÞØÀ -Å1ÞØÑ -Å1ÛÁ -Å1ÛÅ -Å1ÛÉ -Å1ÛÌ -Å1ÛÏ -Å1ÛÔÁ -Å1ÛÕ -Å1ÛØÀ -Å1ÝÁ -Å1ÝÅ -Å1ÝÉ -Å1ÝÏ -Å1ÝÕ -Å1ÝØÀ -Å1Ü -Å1À -Å1Ñ -Å2×1ÍÏ -Å2×1ÒÉÔ -Å2Ä1Ï2Ý -Å2ÏÄÉ -Å2ÏÎÁ -Å2ÏÒÏ -Å2ÐÓÉ -Å2Ò1Õ2Ð -Å2Ó1ÂÉ -Å2Ó3ÐÕ -Å2È1Õ2Þ -Å2ÈË -Å3ÖÉ -Å3Ú×ÏÎ -Å3ÏÌÁ -Å3ÏÎ. -Å3ÔÁ -Å3ÔÅ -Å3ÔÉ -Å3ÔÏ -Å3ÔÕ -Å3ÔÙ -Å3ÔÑ -Å5ÍÉ -Å5ÏÌ. -Å5ÏÌÙ -Å5ÏÈÌ -Å5ÓÔÁ. -Å6ÓÔÉÇ -ÅÁ2ÄÅ -ÅÁ2Ú -ÅÁ2Ô1Ò -ÅÁ6ÄÁ -Å×2ÎÉÍ -Å×2ÎÑÔ -Å×Ï2Ó -ÅÇ1ÌÁ. -ÅÇ1ÌÏ -ÅÇ1ÌÙ -ÅÖ1Í -ÅÖ1Ò -ÅÖÁ6Ô -ÅÚ1×Ï -ÅÚ5×É -ÅÉ2Ç -ÅÉ2Ä -ÅÉ2Í -ÅË1Î -ÅË1ÓÔÕ -ÅÍ1ÎÅ -ÅÍ1ÎÏÇÏ -ÅÍ1Þ -ÅÎ1ÒÉ -ÅÏ1Ó -ÅÏ2 -ÅÏ2ÄÅÔ -ÅÏ2Ö -ÅÏ2ËÏÎ -ÅÏ2ÒÕ -ÅÏ2Þ -ÅÏ2Ý -ÅÏ6È× -ÅÏÂ1Ì -ÅÏÕ4 -ÅÐ1ÌÅ -ÅÐ1ÌÉ. 
-ÅÐ1ÔÁ -ÅÐ1ÔÏ -ÅÐ5ÔÅ -ÅÐ5ÔÉÞ -ÅÐ5ÔÕÒ -ÅÐÉ1Ô2Ò -ÅÒ1×ÁÔ -ÅÒ1ÔÑ -ÅÒ6ËÌ -ÅÒÅ1Ä2Ò -ÅÒÅ1Ä× -ÅÒÅ1Ú× -ÅÒÅ1Ð -ÅÒÅ1Ó2Ó -ÅÒÅ5ÇÎ -ÅÒÅÐ2Ì -ÅÒÉ1ÓË -ÅÒÉÓ2 -ÅÒÏ6 -ÅÓ1Ð -ÅÓ2ËÌÅ -ÅÓ2ËÏÛ -ÅÓ2ÐÁÓ -ÅÓ5ËÕÒ -ÅÓËÒÉÐ1 -ÅÔ1Ì -ÅÔ1Ò -ÅÔ2ÒÄ -ÅÕ3ÔÏ -ÅÈ1Ï2Ë -ÅÈ5Ï -ÅÛ1ÔÏ -Ö1 -Ö1Ö -Ö1Ú -Ö1Ì -Ö1ÍÁ -Ö1Î -Ö1Ð -Ö1Ó -Ö1Ô -Ö1Þ -Ö2ÖÅ -ÖÁÔ1× -ÖÅ1Ó2Ð -ÖÅ5Ä2 -ÖÅÏ2 -ÖÉ2×1Ì -ÖÉ2Ì1ÏÔ -ÖÉ2Ì1Õ2Ð -Ú1ÁËÔ -Ú1×Ë -Ú1×Ñ -Ú1Ç -Ú1Ä× -Ú1ÄÅ -Ú1ÄÉ -Ú1ÄÕ -Ú1ÄÙ -Ú1ÄÑ -Ú1Ú -Ú1Ì -Ú1ÎÅ -Ú1ÎÉ -Ú1ÎÏ -Ú1ÎÕ -Ú1ÎÀ -Ú1ÏÂÝ -Ú1ÏËÓ -Ú1ÏÒÇ -Ú1Ð -Ú1ÒÁ -Ú1ÒÏÄ -Ú1ÒÑÄ -Ú1Ô -Ú1à -Ú1Þ -Ú1Û -Ú1Ü -Ú2×ÕË -Ú2×ÑË -Ú2Ç1ÎÉ -Ú2Ç1ÎÕ -Ú2ÒÁË -Ú2ÒÁÞ -Ú5×ÅÔ -Ú5ÇÎÁ -Ú5ÄÏÍ -Ú5ÒÅÚ -Ú6×ÏÎ -Ú6ÔØ -ÚÁ1×ÞÅ -ÚÁ1Ç2 -ÚÁ1ÄÒ -ÚÁ1Ú2 -ÚÁ1Ë× -ÚÁ1Ò2Ä -ÚÁ1Ò2Ö -ÚÁ1Ó -ÚÁ1È -ÚÁ1Û -ÚÁ2ÛË -ÚÁ3ÔÍ -ÚÁ5Ô× -ÚÁ5Õ -ÚÁÅ2 -ÚÁÎ5Ó6 -ÚÁÓ2Î -ÚÁÓ4ÐÏ -ÚÁÔ2 -ÚÁÞ2Ô -ÚÁÑ6 -Ú×2Î -Ú×Å2Ô3× -ÚÄ2×Á -ÚÄÉ2Ó -ÚÚ2Ì -ÚÉ6ÎÉ -ÚÉ6ÏÎÏ -ÚÏ1 -ÚÏ1Ä2Ò -ÚÏ1Ú2 -ÚÏ1Í2Î -ÚÏ1Ò× -ÚÏ1Ó2 -ÚÏ1Ý -ÚÏ2ÂÉÌ -ÚÏ3Í2Ì -ÚÏË2 -É1Á -É1ÂÁ -É1ÂÅ -É1ÂÉ -É1ÂÏ -É1ÂÒ -É1ÂÕ -É1ÂÙ -É1ÂØÅ -É1ÂØÀ -É1ÂÀ -É1×2Ó -É1×Á -É1×Å -É1×É -É1×Ï -É1×Õ -É1רŠ-É1רÀ -É1×ØÑ -É1×Ñ -É1ÇÁ -É1ÇÅ -É1ÇÉ -É1ÇÌ -É1ÇÏ -É1ÇÕ -É1ÄÁ -É1ÄÅ -É1ÄÉ -É1ÄÏ -É1ÄÒ -É1ÄÕ -É1ÄÙ -É1ÄÀ -É1ÄÑ -É1Å -É1ÖÁ -É1ÖÅ -É1ÖÖ -É1ÖÉ -É1ÖÏ -É1ÖÕ -É1Ú2×ÅÚ -É1ÚÁ -É1ÚÅ -É1ÚÉ -É1ÚÎÁ -É1ÚÏ -É1ÚÒ -É1ÚÕ -É1ÚÙ -É1ÚØÀ -É1ÚÀ -É1ÚÑ -É1É -É1ËÁ -É1Ë× -É1ËÅ -É1ËÉ -É1ËÏ -É1ËÕ -É1ËÀ -É1ÌÁ -É1ÌÅ -É1ÌÉ -É1ÌÏ -É1ÌÕ -É1ÌÙ -É1ÌØÅ -É1ÌØÉ -É1ÌØÑ -É1ÌÀ -É1ÌÑ -É1ÍÁ -É1ÍÅ -É1ÍÏ -É1ÍÕ -É1ÍÙ -É1ÍØÅ -É1ÍØÉ -É1ÍØÀ -É1ÍØÑ -É1ÍÑ -É1ÎÁ -É1ÎÅ -É1ÎÉ -É1ÎÏ -É1ÎÕ -É1ÎÙ -É1ÎØÅ -É1ÎØÉ -É1ÎØÀ -É1ÎØÑ -É1ÎÀ1 -É1ÎÑ -É1Ï -É1ÐÁ -É1ÐÉ -É1ÐÌ -É1ÐÏ -É1ÐÕ -É1ÐÙ -É1ÐØÀ -É1ÐÀ -É1ÐÑ -É1ÒÁ -É1ÒÅ -É1ÒÉ -É1ÒÏ -É1ÒÕ -É1ÒÙ -É1ÒØÅ -É1ÒØÉ -É1ÒØÀ -É1ÒØÑ -É1ÒÀ -É1ÒÑ -É1Ó2ÎÉ -É1ÓÁ -É1ÓÅ -É1ÓÉ -É1ÓÏ -É1ÓÔÁ -É1ÓÔÅ -É1ÓÔÉ -É1ÓÔÒÁ -É1ÓÔÕ -É1ÓÔÙ -É1ÓÔØÅ -É1ÓÔØÀ -É1ÓÔØÑ -É1ÓÔÀ -É1ÓÔÑ -É1ÓÕ -É1ÓÙ -É1ÓØÅ -É1ÓØÉ -É1ÓØÀ -É1ÓØÑ -É1ÓÀ -É1Ô2ÒÁÇ -É1Ô2ÒÅÓ -É1Ô2ÒÏÎ -É1ÔÅ -É1ÔÉ -É1ÔÏ -É1ÔÕ -É1ÔÙ -É1ÔØÅ -É1ÔØÀ -É1ÔØÑ -É1ÔÀ -É1ÔÑ -É1Õ -É1ÆÁ -É1ÆÅ -É1ÆÉ -É1ÆÏ -É1ÆÕ -É1ÈÁ -É1ÈÅ -É1ÈÉ -É1ÈÏ -É1ÈÕ -É1ÃÁ -É1ÃÅ -É1ÃÉ -É1ÃÏ -É1ÃÕ -É1ÞÁ -É1ÞÅ -É1ÞÉ -É1ÞÕ -É1ÞØÅ -É1ÞØÉ -É1ÞØÀ -É1ÞØÑ -É1Û2Ð -É1ÛÁ -É1ÛÅ -É1ÛÉ -É1ÛÌ -É1ÛÏ -É1ÛÕ -É1ÛØÅ -É1ÛØÀ -É1ÛØÑ -É1ÝÁ -É1ÝÅ -É1ÝÉ -É1ÝÏ -É1ÝÕ -É1Ü -É1À -É1Ñ -É2Á1Ç -É2ÁÐ -É2ÁÆ -É2Å×ÏÄ -É2Ë1Þ -É2Ì1Á2à -É2Ï1Ó2Ë -É2ÏÐÒ -É2ÏÈ -É2Ïà -É2ÐÓÉ -É2Ó1ÔÉÎ -É2Ô1Ì -É2Û1ÌÙ -É2ÀÌ -É2ÀÎ -É5ÇÄ -É5ÍÉ -É5ÏÌÅ -É5ÐÅ -É5ÔÁ -É6Ð5ÔÉÚ -É6ÔÏÔ -ÉÁ1ÓË -ÉÁÓ2 -ÉÇ1Î -ÉÄ1à -ÉÄÉ3ÏÍ -ÉÄÉ5Á -ÉÅ2ÄÉ -ÉÚ1× -ÉÚ1Ä -ÉÚ1ÒÅÞ -ÉÚ2×Á -ÉÚ2ÇÎ -ÉÚÇ1ÎÅ -ÉÚÏ1Ô -ÉÚÏ2Â1Ò -ÉÚÏ2Ï -ÉÚÙÓ1 -ÉË1Î -ÉËÓ1ÔÕ -ÉÌÅ1Ð -ÉÌÅ2Ð1Ì -ÉÌØ1Ä -ÉÍ1Î -ÉÎÏ1Ä2ÒÁ -ÉÎÏ1Ó -ÉÎÓ2 -ÉÎÀ2Û -ÉÏ2ÓÔÁ -ÉÏ5ÓÐ -ÉÏÂ1ÒÅ -ÉÐ1ÔÁ -ÉÐ1ÔÅ -ÉÐ1ÔÏ -ÉÐ1ÔÕ -ÉÒ5× -ÉÓ1 -ÉÓ1Ë -ÉÓ1Í -ÉÓ1Ð -ÉÓ1ÔÅË -ÉÓ1Þ -ÉÓ5ÔÅà -ÉÓË1Î -ÉÓÔ1× -ÉÔ1×Á -ÉÔ1×Å -ÉÔ1Ò -ÉÔ5×Õ -ÉÕ2Ç -ÉÕ2Þ -ÉÕ6Ò -ÉÑ2Ä -Ê2Ä× -Ê2ÌØ -Ê2ÍÓ -Ê2Î× -Ê2Ó1 -Ê2ÓÎ -Ê2ÓÛ -Ê5Ï -Ê6Ó5Æ -ÊÅÒ1× -ÊËÏ5Ð -ÊÓ2ËÏ -ÊÈ2ÓË -Ë1Ä -Ë1ÎÁ -Ë1ÎÏ -Ë1Ð -Ë1ÓË -Ë1È -Ë1Þ -Ë2×ÁË -Ë2Ï1ÂÅÓ -Ë2Ó× -Ë2ÓÌ -Ë2ÓÔ1ÁË -Ë5Ö -Ë5ÌÉÊ -Ë5ÓÔÅ. 
[The remaining deleted pattern lines of dist-packages/wordaxe/wordaxe/dict/hyph_ru.dic are elided here: they are KOI8-R-encoded Russian hyphenation patterns that appear mis-encoded in this rendering and carry no readable content.] diff --git
a/dist-packages/wordaxe/wordaxe/hnj.py b/dist-packages/wordaxe/wordaxe/hnj.py deleted file mode 100755 index 33347a570..000000000 --- a/dist-packages/wordaxe/wordaxe/hnj.py +++ /dev/null @@ -1,160 +0,0 @@ -#!/usr/bin/env python -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' - -import os,sys - -from hyphen import * -from xml.sax.saxutils import escape,quoteattr - -from wordaxe.BaseHyphenator import BaseHyphenator - -VERBOSE = False - -class PyHnjHyphenator(BaseHyphenator): - """ - Hyphenation using pyHnj (Knuth's algorithm). - @TODO The current algorithm does NOT use Knuths algorithm, - but a more or less trivial one. - """ - - def __init__ (self, - language="EN", - minWordLength=4, - quality=8, - hyphenDir=None - ): - BaseHyphenator.__init__(self,language=language,minWordLength=minWordLength) - if hyphenDir is None: - hyphenDir = os.path.join (os.path.split(__file__)[0], "dict") - # load pattern file - fname = os.path.join(hyphenDir,"hyph_%s.dic"%language) - # first line is set of characters, all other lines are patterns - # Note: we do not use a TRIE, we just store the patterns in a dict string:codes - self.quality = quality - lines = open(fname).read().splitlines() - self.characters = lines.pop(0) - self.patterns = {} - for pattern in lines: - pat = "" - codes = "" - digit = "0" - for ch in pattern: - if ch>='0' and ch<='9': - digit = ch - else: - codes = codes+digit - pat = pat+ch - digit = "0" - codes = codes+digit - self.patterns[pat.decode("iso-8859-1")] = codes - - # Hilfsfunktion - def schiebe(self,offset,L): - return [HyphenationPoint(h.indx+offset,h.quality,h.nl,h.sl,h.nr,h.sr) for h in L] - - def zerlegeWort(self,zusgWort): - ### This was the call to pyHnj - ### codes = self.hnj.getCodes(zusgWort.lower()) - ### - ### Here comes the new logic. - - word = "." + zusgWort.lower() + "." - #print "word=%s" % word - # Alle Längen durchgehen (minimum: 2) - codes = ["0"]*len(word) - for patlen in range(2,len(word)-1): - #print "patlen %d" % patlen - for startindx in range(len(word)-patlen): - #print "startindx %d" % startindx - try: - patcode = self.patterns[word[startindx:startindx+patlen]] - #print "testpat=%s patcode=%s" % (word[startindx:startindx+patlen],patcode) - for i,digit in enumerate(patcode): - if digit > codes[i+startindx]: - codes[i+startindx] = digit - except KeyError: - pass - codes = codes[2:-1] - #print zusgWort - #print "".join(codes) - - ### end of the new logic. - - hyphPoints = [] - for i in range(len(codes)): - if (ord(codes[i])-ord('0')) % 2: - hyphPoints.append(HyphenationPoint(i+1,self.quality,0,self.shy,0,u"")) - return [hyphPoints] - - def hyphenate(self,aWord): - assert isinstance(aWord, unicode) - hword = HyphenatedWord(aWord) - loesungen = self.zerlegeWort(aWord) - if len(loesungen)>1: - #hword.info = ("AMBIGUOUS", loesungen) - # nimm nur solche Trennstellen, die in allen Lösungen vorkommen, - # und für die Qualität nimm die schlechteste. 
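For reference, the pattern table built in PyHnjHyphenator.__init__ above splits each hyph_*.dic entry into its letters and the digit "levels" between them; odd levels are what zerlegeWort later turns into hyphenation points. A minimal standalone sketch of that decomposition (pure Python, no wordaxe import; decompose is a hypothetical helper name, not part of the original code):

def decompose(pattern):
    # split a Liang-style pattern such as "a1bc" into letters and level codes
    pat, codes, digit = "", "", "0"
    for ch in pattern:
        if "0" <= ch <= "9":
            digit = ch              # level that precedes the next letter
        else:
            codes += digit
            pat += ch
            digit = "0"
    return pat, codes + digit       # append the trailing level

print(decompose("su2"))    # ('su', '002')
print(decompose("a1bc"))   # ('abc', '0100')

Odd digits in the merged code string are the positions where zerlegeWort above emits a HyphenationPoint.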
- loesung = [] - loesung0, andere = loesungen[0], loesungen[1:] - for i,hp in enumerate(loesung0): - q = hp.quality - for a in andere: - if q: - for hp1 in a: - if hp1.indx==hp.indx \ - and hp1.nl==hp.nl and hp1.sl==hp.sl \ - and hp1.nr==hp.nr and hp1.sr==hp.sr: - q = min(q,hp1.quality) - break - else: - # Trennstelle nicht in der anderen Lösung enthalten - q = 0 - if q: - loesung.append(HyphenationPoint(hp.indx,q,hp.nl,hp.sl,hp.nr,hp.sr)) - elif len(loesungen)==1: - loesung = loesungen[0] - #hword.info = ("HYPHEN_OK", loesung) - if not loesung: - pass #hword.info = ("NOT_HYPHENATABLE", aWord) - else: - #hword.info = ("UNKNOWN", aWord) - loesung = [] - #for i in range(len(aWord)): - for i in range(1,len(aWord)-1): - if aWord[i] in self.postfixChars and aWord[i+1] not in "0123456789": - #print "Trenne", aWord,"an Position:",i,"bei",aWord[i] - # in zwei Teile zerlegen und getrennt betrachten - r = self.shy - if aWord[i] in [self.shy,u"-"]: - r = u"" - loesung1 = self.hyphenate(aWord[:i]) - loesung1.hyphenations.append (HyphenationPoint(i+1,9,0,r,0,u"")) - loesung2 = self.hyphenate(aWord[i+1:]) - # TODO diese Lösungen müssen jetzt zusammengeführt werden. - if loesung2.hyphenations == []: - #nur der 1. Teil kann getrennt werden - loesung = loesung1.hyphenations - else: - #beide Teile können getrennt werden - loesung = loesung1.hyphenations + [HyphenationPoint(hp.indx+i+1,hp.quality,hp.nl,hp.sl,hp.nr,hp.sr) for hp in loesung2.hyphenations] - break - else: - loesung = BaseHyphenator.hyphenate(self,aWord).hyphenations - - hword.hyphenations = loesung - #print "hyphenate %s -> %d points" % (aWord,len(loesung)) - return hword - -if __name__=="__main__": - #print sys.stdout.encoding - h = PyHnjHyphenator("de_DE",5) - h.test(outfname="PyHnjLearn.html") - \ No newline at end of file diff --git a/dist-packages/wordaxe/wordaxe/hyphen.py b/dist-packages/wordaxe/wordaxe/hyphen.py deleted file mode 100755 index a0d5a6f45..000000000 --- a/dist-packages/wordaxe/wordaxe/hyphen.py +++ /dev/null @@ -1,357 +0,0 @@ -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' - -from copy import copy -SHY = "\xAD".decode("iso-8859-1") - -class HyphenationPoint(object): - """ - A possible hyphenation point in a HyphenatedWord. - - Attributes: - indx : The index where to split the word. - quality : The quality of this hyphenation point (0=bad,5=average,9=very good). - nl,sl,nr,sr : Replacement parameters. - - Description: - When we split the word at this hyphenation point, - we can build the two strings left,right as follows: - left = word[:pos-nl] + sl - right = sr + word[pos+nr:] - - Some examples (where q is some quality, i.e. q=5): - - The usual case is nl=0,sl="\173",nr=0,sr="". - In other words, just add a "shy" character to the left string. - "Lesen" (to read) can be hyphenated as "le-" "sen": - HyphenationPoint(2,q,0,"\173",0,"") - - In some cases, it is not necessary to add the shy character: - "ABC-Buch" (ABC book) can be hyphenated as "ABC-" "buch": - HyphenationPoint(4,q,0,"",0,"") - - And - especially using the OLD german rules - the case - nl>0 or nr>0 can occur: - - The word "backen" (to bake) can be hyphenated between the "c" and the "k"; - however, the hyphenated version would be "bak-" "ken". 
- Thus, the one and only hyphenation point in this word is - HyphenationPoint(3,q,1,"k"+"\173",0,"") - - Another example: According to the old german rules, the word "Schiffahrt" - is a concatenation of "Schiff" (ship) and "fahrt" (journey). - The triple "f" is shortened to a double "f". - But in case of hyphenation, it's three "f"s again: "Schiff-" "fahrt". - HyphenationPoint(5,q,0,"f"+shy,0,"") - This could also be expressed as HyphenationPoint(6,q,0,shy,0,"f"). - """ - __slots__ = ["indx","quality","nl","sl","nr","sr"] - def __init__(self,indx,quality,nl=0,sl=u"",nr=0,sr=u""): - self.indx = indx - self.quality = quality - self.nl = nl - self.sl = unicode(sl) - self.nr = nr - self.sr = unicode(sr) - def __str__(self): - return 'HyphP(%d,%d)' % (self.indx,self.quality) - def __repr__(self): - return 'HyphenationPoint(%d,%d,%d,%s,%d,%s)' % (self.indx,self.quality,self.nl,`self.sl`,self.nr,`self.sr`) - -def _lshift(hyphenations, amt): - "Moves the hyphenation points left" - hyph = [] - for h in hyphenations: - if type(h) is int: - if h > amt: - hyph.append(h-amt) - else: - if h.indx > amt: - hyph.append(HyphenationPoint(h.indx-amt,h.quality,h.nl,h.sl,h.nr,h.sr)) - return hyph - -class HyphenatedWord(unicode): - """ - A hyphenated word. - - Attributes: - word: the word without hyphenations - hyphenations: a list containing the possible hyphenation points. - info: Information about the hyphenation process. - - See also class Hyphenator for an explanation. - """ - - __slots__ = ["hyphenations",] - - def __new__(klass, word, hyphenations=None, encoding="utf-8", errors='strict'): - if isinstance(word, unicode): - o = unicode.__new__(klass, word) - else: - o = unicode.__new__(klass, word, encoding, errors) - if hyphenations is not None: - o.hyphenations = hyphenations - elif hasattr(word, "hyphenations"): - o.hyphenations = word.hyphenations - else: - raise ValueError("'hyphenations' Argument is missing") - return o - - def __str__(self): - return self.encode("utf-8") - - def __repr__(self): - return ("HyphenatedWord(%s)" % super(HyphenatedWord, self).__repr__()) - - def __add__(self, other): - """(other) -> instance of this class - Like unicode.__add__, but assumes that the other element - is either unicode or an utf-8 encoded string. - """ - if not isinstance(other,unicode): - other = unicode(other, "utf-8") - return unicode(unicode.__add__(self, other)) - - def __radd__(self, other): - """(other) -> instance of this class - Like unicode.__add__, but assumes that the other element - is either unicode or an utf-8 encoded string. - """ - if isinstance(other, basestring): - if not isinstance(other,unicode): - other = unicode(other, "utf-8") - return unicode(unicode.__add__(other, self)) - else: - return NotImplemented - - def split(self, hp): - """Performs a split at the given hyphenation point. - - Returns a tuple (left,right) - where left is a string (the left part, including the hyphenation character) - and right is a HyphenatedWord describing the rest of the word. 
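A standalone sketch of the left/right arithmetic documented for HyphenationPoint and used by split() above; it relies only on the formulas given in that docstring (split_at is a hypothetical helper, SHY is the soft-hyphen character):

SHY = u"\xad"   # soft hyphen

def split_at(word, indx, nl=0, sl=SHY, nr=0, sr=u""):
    # left = word[:indx-nl] + sl,  right = sr + word[indx+nr:]
    return word[:indx - nl] + sl, sr + word[indx + nr:]

print(split_at(u"Lesen", 2))                        # ('Le\xad', 'sen')  i.e. "le-" "sen"
print(split_at(u"backen", 3, nl=1, sl=u"k" + SHY))  # ('bak\xad', 'ken') per the old German rules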
- """ - if type(hp) is int: - left = self[:hp] + SHY - hyph = _lshift (self.hyphenations, hp) - print hyph - right = self.__class__(self[hp:], hyphenations=hyph) - else: - shift = hp.indx-hp.nr+len(hp.sr) - left = self[:hp.indx-hp.nl] + hp.sl - hyph = _lshift (self.hyphenations, shift) - right = self.__class__(hp.sr+self[hp.indx+hp.nr:], hyphenations=hyph) - assert isinstance(left, unicode) - assert isinstance(right, self.__class__) - return (left,right) - - def prepend(self, string): - "Allows adding prefix chars (such as '('), returning a new HyphenatedWord" - return self.__class__(unicode(string) + self, hyphenations=_lshift(self.hyphenations,-len(string))) - - def append(self, string): - "Allows adding suffix chars (such as ')'), returning a new HyphenatedWord" - return self.__class__(self + unicode(string), hyphenations=self.hyphenations) - - def showHyphens(self): - "Returns the possible hyphenations as a string list, for debugging purposes." - L = [] - for h in self.hyphenations: - left,right = self.split(h) - L.append(u"%s %s (%d)" % (left,right, h.quality)) - return L - - def get_hyphenations(self): - "Returns an iteration of the possible hyphenations." - for hp in self.hyphenations: - yield self.split(hp) - - @staticmethod - def join(*hyphwords): - """ - Create a new hyphenated word from a list of other hyphenated words. - a = HyphenatedWord("Vogel") # Vo-gel - b = HyphenatedWord("grippe") # grip-pe - Inserts a good quality hyphenation point at the boundaries. - c = HyphenatedWord.join(a,b) - # Vo-gel=grip-pe. - """ - if len(hyphwords) == 1: - hyphwords = hyphwords[0] - for w in hyphwords: - assert isinstance(w,HyphenatedWord) - word = u"".join(hyphwords) - hps = [] - offset = 0 - for w in hyphwords: - hps += _lshift(w.hyphenations, -offset) - if w is not hyphwords[-1]: - #print w.word - if w.endswith(u"-") or w.endswith(SHY): - hps.append(HyphenationPoint(offset+len(w), quality=9)) - else: - hps.append(HyphenationPoint(offset+len(w), quality=9, sl=SHY)) - offset += len(w) - return HyphenatedWord(word, hyphenations=hps) - -class Hyphenator: - """ - Hyphenator serves as the base class for all hyphenation implementation classes. - - Some general thoughts about hyphenation follow. - - Hyphenation is language specific. - Hyphenation is encoding specific. - Hyphenation does not use the context of a word. - Good Hyphenation enables the reader to read fluently, - bad hyphenation can make a word hard to read. - - Hyphenation is language specific: - The same word may be valid in several languages, - and the valid hyphenation points can depend on the language. - Example: Situation - - Hyphenation is encoding specific: - This is just an implementation detail really, - however an important one. - For example, every hyphenation algorithm uses some internal - encoding scheme, and it should document this scheme. - How is the input encoding and the output encoding? - - Hyphenation does not use the context of the word: - Surely, it could make sense to "understand" the context. - There may be some words that should be hyphenated differently - depending on the context. - But this would make a really BIG overhead; - and I can't really think of an example. It's not worth thinking about it. - - Good Hyphenation enables the reader to read fluently, - bad hyphenation can make a word hard to read. - Some languages, for example german, make frequent use of - the concatenation of several simple words to build more complex words, - like "Hilberts Nullstellensatz" (something I remember from Algebra). 
- Null = Zero - Stelle = Place, Location - Satz = Theorem (math) - - The one famous example for bad german hyphenation is the word "Urinstinkt". - This is made up of - Ur = Primal - Instinkt = Instinct - Hyphenatiing this word in a valid, but unfortunate position, - yields "Urin-stinkt" (urine stinks). - - These thoughts have led to the following interface for hyphenation. - """ - - def __init__ (self, language, minWordLength=4, codec=None, shy=SHY, **options): - """ - Creates a new hyphenator instance for the given language. - In this base class, the language arguments serves only for - information purposes. - Words shorter than minWordLength letters will never be considererd - for hyphenation. - """ - self.language = language - self.minWordLength = 4 - assert isinstance(shy, unicode) - self.shy = shy - self.options = options - - """ - self.codec = codec - if self.codec is None: - import encodings.latin_1 - self.codec = encodings.latin_1.Codec() - """ - """ - def getCodec(self): - return self.codec - """ - - def getLanguage(self): - return self.language - - def getMinWordLength(self): - return self.minWordLength - - def setMinWordLength(self,nLength): - if type(nLength)==int and nLength>2 and nLength<100: - self.minWordLength = nLength - else: - raise ValueError, nLength - - def __repr__(self): - #return "%s(%s,%d,%s)" % (str(self.__class__),self.language,self.minWordLength,self.codec) - return "%s(%s,%d)" % (str(self.__class__),self.language,self.minWordLength) - - def postHyphenate(self,hyphenatedWord): - """This function is called whenever hyphenate has been called. - It can be used to do some logging, - or to add unknown words to a dictionary etc. - """ - if hyphenatedWord is not None: - assert isinstance(hyphenatedWord, HyphenatedWord) - assert type(hyphenatedWord.hyphenations) == list - - def i_hyphenate(self, aWord): - """ - This base class does not support any hyphenation! - """ - return None - - def hyphenate(self,aWord): - """ - Finds possible hyphenation points for a aWord, returning a HyphenatedWord - or None if the hyphenator doesn't know the word. - """ - assert isinstance(aWord,unicode) - hword = self.i_hyphenate(aWord) - self.postHyphenate(hword) - return hword - -class Cached(Hyphenator): - """ - This caches the results of the hyphenate function. - Use it if the hyphenation is too slow. - """ - - def __init__(self, hyphenator, max_entries): - """ - Creates a new, cached version of hyphenator - that caches at most max_entries of the results - from hyphenator.hyphenate. - If you need other functionality of the hyphenator, - you have to access the attribute "hyphenator" - directly. - """ - self._max_entries = max_entries - assert isinstance(hyphenator, Hyphenator) - self.hyphenator = hyphenator - self.cache = dict() - - def hyphenate(self, aWord): - """ - Get the hyphenated word for word from the cache. - If not found there, call the internal hyphenator - and add to the cache (like a lazy setdefault). - """ - cache = self.cache - if aWord not in cache: - if len(cache) >= self._max_entries: - self.cache = dict() - self.cache[aWord] = self.hyphenator.hyphenate(aWord) - return self.cache[aWord] - - def purge_cache(self): - """ - Purges the cache (freeing resources). 
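A hedged usage sketch for the Cached wrapper above, combining it with the PyHnjHyphenator removed earlier in this patch. It assumes Python 2 and that the wordaxe package (including its dict/ files) is still installed; the import paths and the sample word are assumptions that mirror the file layout being deleted here:

from wordaxe.hnj import PyHnjHyphenator
from wordaxe.hyphen import Cached

slow = PyHnjHyphenator("de_DE", 5)        # pattern-based hyphenator from hnj.py
fast = Cached(slow, max_entries=1000)     # keep at most 1000 hyphenated words

hw = fast.hyphenate(u"Silbentrennung")    # first call goes to PyHnjHyphenator
hw = fast.hyphenate(u"Silbentrennung")    # second call is answered from the cache
print(hw.showHyphens())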
- """ - self.cache = dict() diff --git a/dist-packages/wordaxe/wordaxe/hyphrules.py b/dist-packages/wordaxe/wordaxe/hyphrules.py deleted file mode 100755 index 9ea4c07bb..000000000 --- a/dist-packages/wordaxe/wordaxe/hyphrules.py +++ /dev/null @@ -1,464 +0,0 @@ -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' - -import logging -logging.basicConfig() -log = logging.getLogger("HyphRules") -log.setLevel(logging.WARNING) - -from wordaxe.hyphen import SHY, HyphenationPoint - -class AlgorithmError(Exception): - pass - -def decodeTrennung(t): - """Hyphenates a word whose hyphenation point are explicitly given. - For example "play5er". - """ - W=[] - p=0 - for i in range(len(t)): - if t[i] in "123456789": - q = int(t[i]) - W.append(HyphenationPoint(p,q,0,SHY,0,u"")) - else: - p += 1 - return W - -class HyphRule: - """Definition of a rule for hyphenation. - """ - name = "generic hyphenation rule - do not use directly" - - # When to check this rule (in chronological order): - PRE_PIECE = 0 # before adding this piece to the WordFrag - PRE_ROOT = 1 # before adding the root of the wordFrag (without knowing the root) - PRE_NEXT_PIECE = 2 # before adding the next piece to the WordFrag - PRE_WORD = 3 # before adding the WordFrag as a word to the compound word - PRE_NEXT_WORD = 4 # before adding the next word to the compound word - AT_END = 5 # when the compound word is complete - - def __init__(self,when): - self.when = when - self.args = "" - - def check(self,wfrag,when): - """Check if a give word fulfills this rule. - """ - raise AlgorithmError - - def __str__(self): - return self.name + " " + self.args - - def __repr__(self): - return "HyphRule(%s %r)" % (self.name, self.args) - -class NEED_PREFIX(HyphRule): - """The given wordfrag needs a prefix - (any or one of those given in args). - """ - name = "NEED_PREFIX" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_PIECE]) - if args: - self.allowedPrefixes = " "+args+" " - else: - self.allowedPrefixes = None - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_PIECE: - if not wfrag.prefix: - return False - if self.allowedPrefixes: - log.debug ("allowedPrefixes: %r", self.allowedPrefixes) - for p in wfrag.prefix: - if " "+p.strval+" " in self.allowedPrefixes: - return True - return False - return True - raise AlgorithmError - -class NO_PREFIX(HyphRule): - """The given wordfrag must not contain any prefix. - (if args is given, args must not contain any of the wordfrag's prefixes). - """ - name = "NO_PREFIX" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_PIECE]) - self.forbiddenPrefixes = " "+args+" " - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_PIECE: - if wfrag.prefix: - return False - if self.forbiddenPrefixes: - for p in wfrag.prefix: - if " "+p.strval+" " in self.allowedPrefixes: - return False - return True - raise AlgorithmError - -class NEED_SUFFIX(HyphRule): - """The given wordfrag needs a suffix. 
- """ - name = "NEED_SUFFIX" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_NEXT_PIECE]) - if args: raise ValueError - - def check(self,wfrag,when,nextPiece=None): - #log.debug ("when=%d, nextPiece=%r", when, nextPiece) - if when==HyphRule.PRE_NEXT_PIECE: - assert isinstance(nextPiece,Suffix) - return True - raise AlgorithmError - -class NO_SUFFIX(HyphRule): - """The given wordfrag must not have any suffix. - """ - name = "NO_SUFFIX" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_NEXT_PIECE]) - if args: raise ValueError - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_NEXT_PIECE: - assert isinstance(nextPiece,Suffix), repr(nextPiece.strval) - return False - raise AlgorithmError - -class ForeignWordRule(HyphRule): - """A helper class for ENGLISCH and FREMDWORT (foreign words from different languages)""" - name = "ForeignWordRule" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_PIECE,HyphRule.PRE_NEXT_PIECE]) - if args: raise ValueError - - def check(self,wfrag,when,nextPiece=None): - log.debug("%s check %r,%s,%s", self.__class__, wfrag, when, nextPiece) - if when==HyphRule.PRE_PIECE: - if not wfrag.root: # called for a prefix or the root - log.debug("PRE_PIECE called for a prefix or the root") - if isinstance(nextPiece,Root): - setattr(wfrag,self.name,True) # notify the suffixes - log.debug("Attribut %s gesetzt bei Objekt %s %s", self.name, wfrag.__class__, id(wfrag)) - return True - else: # called for a suffix - log.debug("PRE_PIECE called for a suffix") - log.debug("self.name=%s", self.name) - log.debug("wfrag.id=%s, wfrag.dir=%s", id(wfrag), dir(wfrag)) - return hasattr(wfrag,self.name) - elif when==HyphRule.PRE_NEXT_PIECE: - if isinstance(nextPiece,Root): # called for the last prefix - for prop in nextPiece.props: # return True iff the root is a FREMDWORT - if prop.name ==self.name: - return True - return False - return True # called for anything else but the last prefix - raise AlgorithmError - -class ENGLISCH(ForeignWordRule): - """The given wordfrag is ENGLISCH (coming from English). - Therefore different prefixes and suffixes can be checked. - If this is defined for the last prefix in a wordfrag, then the root must be ENGLISCH, too. - If this is defined for a Suffix, then the root must be ENGLISCH, too. - """ - name = "ENGLISCH" - - def __init__(self,args=""): - ForeignWordRule.__init__(self,args) - -class FREMDWORT(ForeignWordRule): - """The given wordfrag is a FREMDWORT (coming from Greek or Latin). - Therefore different prefixes and suffixes can be checked. - If this is defined for the last prefix in a wordfrag, then the root must be FREMDWORT, too. - If this is defined for a Suffix, then the root must be FREMDWORT, too. - """ - name = "FREMDWORT" - - def __init__(self,args=""): - ForeignWordRule.__init__(self,args) - -class ONLY_FIRST(HyphRule): - """This prefix (resp. suffix) must be the first prefix (resp. suffix). - """ - name = "ONLY_FIRST" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_PIECE]) - if args: raise ValueError - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_PIECE: - log.debug("ONLY_FIRST PRE_PIECE chk, wfrag=%s, nextPiece=%s", wfrag, nextPiece) - if isinstance(nextPiece,Prefix): - return (not wfrag.prefix) - elif isinstance(nextPiece,Suffix): - return (not wfrag.suffix) - raise AlgorithmError - -class ONLY_LAST(HyphRule): - """This prefix (resp. suffix) must be the last prefix (resp. suffix). 
- """ - name = "ONLY_LAST" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_NEXT_PIECE]) - if args: raise ValueError - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_NEXT_PIECE: - if isinstance(nextPiece,Prefix) \ - or isinstance(nextPiece,Suffix): - return False - return True - raise AlgorithmError - -class ONLY_FIRST_WORD(HyphRule): - """This word must be the first word in a compound word. - """ - name = "ONLY_FIRST_WORD" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_WORD]) - if args: raise ValueError - - def check(self,compWord,when,nextPiece=None): - if when==HyphRule.PRE_WORD: - return compWord==[] - raise AlgorithmError - -class ONLY_LAST_WORD(HyphRule): - """This word must be the last word in a compound word. - """ - name = "ONLY_LAST_WORD" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_NEXT_WORD]) - if args: raise ValueError - - def check(self,compWord,when,nextPiece=None): - if when==HyphRule.PRE_NEXT_WORD: - return False - raise AlgorithmError - -class NOT_AFTER_WORD(HyphRule): - """This word must not follow any of the words given in args. - """ - name = "NOT_AFTER_WORD" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_WORD]) - self.args = " "+args+" " - - def check(self,compWord,when,nextPiece=None): - if when==HyphRule.PRE_WORD: - if compWord and " "+compWord[-1].root.strval+" " in self.args: - return False - return True - raise AlgorithmError - -class NOT_LAST_WORD(HyphRule): - """This word must not be the last word in a compound word. - """ - name = "NOT_LAST_WORD" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_NEXT_WORD,HyphRule.AT_END]) - if args: raise ValueError - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_NEXT_WORD: - if not wfrag: - return True # log.error ("NOT_LAST_WORD: wfrag=%r", wfrag) - else: - setattr(wfrag[-1],self.name,True) - return True - elif when==HyphRule.AT_END: - return hasattr(wfrag[-1],self.name) - raise AlgorithmError, when - -class SINGLE_WORD(HyphRule): - """This word must be the only one in a compound word. - """ - name = "SINGLE_WORD" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_WORD,HyphRule.PRE_NEXT_WORD]) - if args: raise ValueError - - def check(self,compWord,when,nextPiece=None): - if when==HyphRule.PRE_WORD: - return compWord==[] - elif when==HyphRule.PRE_NEXT_WORD: - return False - raise AlgorithmError - -class ONLY_AFTER(HyphRule): - """This piece may only follow immediately after one of the pieces given in args. - """ - name = "ONLY_AFTER" - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_PIECE]) - self.args = " "+args+" " - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_PIECE: - if isinstance(nextPiece,Prefix): - return wfrag.prefix and (" "+wfrag.prefix[-1].strval+" " in self.args) - elif isinstance(nextPiece,Suffix): - return wfrag.suffix and (" "+wfrag.suffix[-1].strval+" " in self.args) - # not allowed for Root - raise AlgorithmError - -class NOT_AFTER(HyphRule): - """This piece must not follow immediately after any of the pieces given in args. 
- """ - name = "NOT_AFTER" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_PIECE]) - self.args = " "+args+" " - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_PIECE: - if isinstance(nextPiece,Prefix): - return not (wfrag.prefix and (" "+wfrag.prefix[-1].strval+" " in self.args)) - elif isinstance(nextPiece,Suffix): - return not (wfrag.suffix and (" "+wfrag.suffix[-1].strval+" " in self.args)) - # not allowed for Root - raise AlgorithmError - -class NOT_BEFORE(HyphRule): - """This piece must not stand immediately before any of the pieces given in args. - """ - name = "NOT_BEFORE" - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_NEXT_PIECE]) - self.args = " "+args+" " - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_NEXT_PIECE: - if isinstance(nextPiece,Prefix): - return not (wfrag.prefix and (" "+wfrag.prefix[-1].strval+" " in self.args)) - elif isinstance(nextPiece,Suffix): - return not (wfrag.suffix and (" "+wfrag.suffix[-1].strval+" " in self.args)) - else: - return True # don't check the rule for the last prefix (nextPiece is the Root). - raise AlgorithmError - -class TRENNUNG(HyphRule): - """The hyphenation for this root (or a special word) - is given explicitly (don't use Duden-Algorithm). - This rule is special, because there is a hard-coded - reference to it in the algorithm. - """ - name = "TRENNUNG" - - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.AT_END]) - self.args = decodeTrennung(args) - - def check(self,wfrag,when,nextPiece=None): - return True - -class KEEP_TOGETHER(TRENNUNG): - """Do not hyphenate inside this root (or special word). - """ - name = "KEEP_TOGETHER" - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.AT_END]) - assert args=="" - self.args = [] - - def check(self,wfrag,when,nextPiece=None): - return True - -class NOT_BEFORE_CHAR(HyphRule): - """This piece must not stand before one of the characters in args. - """ - name = "NOT_BEFORE_CHAR" - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_NEXT_PIECE]) - self.args = args - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_NEXT_PIECE: - return nextPiece.strval[0] not in self.args - raise AlgorithmError - -class NOT_AFTER_CHAR(HyphRule): - """This piece must not follow after any of the characters in args. 
- """ - name = "NOT_AFTER_CHAR" - def __init__(self,args=""): - HyphRule.__init__(self,[HyphRule.PRE_PIECE]) - self.args = args - - def check(self,wfrag,when,nextPiece=None): - if when==HyphRule.PRE_PIECE: - if wfrag.suffix: - return wfrag.suffix[-1].strval[-1] not in self.args - elif wfrag.root: - return wfrag.root.strval[-1] not in self.args - elif wfrag.prefix: - return wfrag.prefix[-1].strval[-1] not in self.args - raise AlgorithmError - -rulelist = [ NEED_PREFIX, NO_PREFIX, - NEED_SUFFIX, NO_SUFFIX, - ENGLISCH, FREMDWORT, - ONLY_FIRST, ONLY_LAST, - ONLY_FIRST_WORD, ONLY_LAST_WORD, NOT_AFTER_WORD, NOT_LAST_WORD, SINGLE_WORD, - ONLY_AFTER, NOT_AFTER, - NOT_BEFORE, - TRENNUNG, KEEP_TOGETHER, - NOT_BEFORE_CHAR, NOT_AFTER_CHAR, - ] - -RULES = dict([(r.name,r) for r in rulelist]) - -def NO_CHECKS(siz=6): - return [list() for x in range(siz)] - -class StringWithProps: - """A string with properties.""" - __slots__ = ["strval", "props"] - def __init__(self,s,p): - self.strval = s - self.props = p - def __str__(self): - #raise ValueError - return self.strval - - def getChecks(self): - """return a 6-element list, where each element is a list of HyphRules: - [PRE_PIECE checks, PRE_ROOT checks, PRE_NEXT_PIECE checks, - PRE_WORD checks, PRE_NEXT_WORD checks, AT_END checks]. - """ - chks=NO_CHECKS() - for p in self.props: - for w in p.when: - chks[w].append(p) - return chks - -class Prefix(StringWithProps): - pass - -class Root(StringWithProps): - pass - -class Suffix(StringWithProps): - pass diff --git a/dist-packages/wordaxe/wordaxe/plugins/PyHyphenHyphenator.py b/dist-packages/wordaxe/wordaxe/plugins/PyHyphenHyphenator.py deleted file mode 100755 index c0fff220a..000000000 --- a/dist-packages/wordaxe/wordaxe/plugins/PyHyphenHyphenator.py +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/env python -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' - -import os.path - -import hyphen as pyhyphen -import hyphen.dictools as dictools - -from wordaxe.hyphen import HyphenatedWord, HyphenationPoint - -from wordaxe.ExplicitHyphenator import ExplicitHyphenator - -class PyHyphenHyphenator(ExplicitHyphenator): - """ - Hyphenation using Leo's excellent pyhyphen package from - http://pyhyphen.googlecode.com. - As it seems, it is triple-licensed (see its __init__.py header). - So it should be ok to include it here. - To use it, you have to first download and install pyhyphen - using the usual python setup.py install procedure, - then install the dictionary files you need (from OpenOffice) - in the wordaxe/dict directory. If you have a working internet - connection, the dictionary file will be installed on demand. 
- - To use the hyphenator: - - from wordaxe.plugins.PyHyphenHyphenator import PyHyphenHyphenator - hyphenator = PyHyphenHyphenator("de_DE",5) - hw = hyphenator.hyphenate(u"Python-Übungsleiter") - hw.showHyphens() - """ - - def __init__ (self, - language="EN", - minWordLength=4, - quality=8, - hyphenDir=None, - **options - ): - ExplicitHyphenator.__init__(self,language=language,minWordLength=minWordLength,**options) - if hyphenDir is None: - hyphenDir = os.path.join(os.path.split(__file__)[0], "..", "dict") - fname = os.path.join(hyphenDir, "hyph_%s.dic" % language) - if not dictools.is_installed(language, directory=hyphenDir): - dictools.install(language, directory=hyphenDir) - print "installed dictionary for %s into %s" % (language, hyphenDir) - self.hnj = pyhyphen.hyphenator(language, directory=hyphenDir) - self.quality = quality - - # Hilfsfunktion - def schiebe(self,offset,L): - return [HyphenationPoint(h.indx+offset,h.quality,h.nl,h.sl,h.nr,h.sr) for h in L] - - def zerlegeWort(self,zusgWort): - hyphPoints = [] - for left, right in self.hnj.pairs(zusgWort): - # Uncomment next line for an example of non-standard hyphenation - # if left=="schif" and right=="fahrt": left="schiff" - sl = self.shy - if left[-1] in [u"-", self.shy]: - sl = u"" - if left + right == zusgWort: - hp = HyphenationPoint(len(left),self.quality,0,sl,0,u"") - else: - # Handle non-standard hyphenation - # TODO: Test this. - for i, ch in enumerate(left): - if ch != zusgWort[i]: - nl = len(left)-i - sl = left[i:] + sl - break - else: - nl = 0 - pos = len(left) - thgir = list(right) - thgir.reverse() - for i, ch in enumerate(thgir): - if ch != zusgWort[-i-1]: - nr = 0 - sr = right[:-i-1] - break - else: - nr = 0 - sr = right[:len(left)+len(right)-len(zusgWort)] - assert sr, ("This should be handled via left+right==zusgWort", left, right, zusgWort) - hp = HyphenationPoint(len(left),self.quality,nl,sl,nr,sr) - hyphPoints.append(hp) - return hyphPoints - - def hyph(self,aWord): - assert isinstance(aWord, unicode) - hword = HyphenatedWord(aWord, hyphenations=self.zerlegeWort(aWord)) - # None (unknown) kann hier nicht vorkommen, da der - # Algorithmus musterbasiert funktioniert und die Wörter - # sowieso nicht "kennt" oder "nicht kennt". - return hword - - def i_hyphenate(self, aWord): - return ExplicitHyphenator.i_hyphenate_derived(self, aWord) - -if __name__=="__main__": - h = PyHyphenHyphenator("de_DE",5) - h.add_entries({u"wordaxe": u"word8axe", - }) - h.test(outfname="PyHyphenLearn.html") - diff --git a/dist-packages/wordaxe/wordaxe/plugins/__init__.py b/dist-packages/wordaxe/wordaxe/plugins/__init__.py deleted file mode 100755 index 8a16140c5..000000000 --- a/dist-packages/wordaxe/wordaxe/plugins/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' - -__doc__= """ -Optional plugins for wordaxe can be installed here. -You can also use this package to avoid name conflicts: -For example: import hyphen to import the pyhyphen package -would not work from the wordaxe directory, since wordaxe -itself contains a module "hyphen". But from here, it works. 
-""" diff --git a/dist-packages/wordaxe/wordaxe/rl/NewParagraph.py b/dist-packages/wordaxe/wordaxe/rl/NewParagraph.py deleted file mode 100755 index ecf04f04b..000000000 --- a/dist-packages/wordaxe/wordaxe/rl/NewParagraph.py +++ /dev/null @@ -1,1358 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -# A new paragraph implementation - -__doc__ = """ -A new Paragraph implementation. - -A Paragraph can be constructed in one of two ways: - * supplying text (with support for a HTML-subset formatting) - * supplying frags directly. -If text is supplied, the constructor calls the ParaParser -to parse it and construct frags. - -Please note that a "frag" is different from the ReportLab -standard "frag": Here, a frag is either a StyledWord instance -or a StyledFragment instance (see para_fragments.py). -However, there are two functions in para_fragments.py that -allow you to convert classic RL frag lists to wordaxe frag lists -and vice versa: -frags_reportlab_to_wordaxe and frags_wordaxe_to_reportlab - -The following is a definition of some typographic concepts: -´ - -BASELINE, ASCENT, DESCENT: - Characters seem to "rest" on the baseline. - The ASCENT of a font is the maximum distance from the baseline to - the top of upper-case characters (accents not counted). - Usually,all upper-case characters in a fonts have the same height, - and characters like 'b', 'd', 'l', 't' have this same height, too. - The DESCENT of a font is the maximum distance from the baseline - to the bottom of characters like 'f','g', 'j' etc. - - Note that other, non-character glyphs (like the integral symbol) - may differ in height, for example, the integral symbol's height - is greater than the font's ascent+descent. - -LEADING: - (pronounced like heading, it comes from the metal used in - typesetting). ------------------------------------------------------------------ - Note: The definition used inside the ReportLab toolkit is - different from the definition used elsewhere! ------------------------------------------------------------------ - While the common definition is "the space between the bottom - of the characters of one line and the top of the characters in - the next line (i.e. line height = ASCENT+DESCENT+LEADING), - ReportLab uses a different definition - see userguide.pdf, - "Text object methods, Interline spacing (Leading)". 
- -See also: - * http://developer.apple.com/documentation/mac/Text/Text-186.html - * http://java.sun.com/developer/onlineTraining/Media/2DText/other.html -Ascent: - - -""" - -from reportlab.lib.units import cm -from reportlab.lib.enums import TA_LEFT, TA_RIGHT, TA_CENTER, TA_JUSTIFY - -from wordaxe.rl.kerning_info import kerning_pairs - -try: - from reportlab.lib.geomutils import normalizeTRBL -except ImportError: - def normalizeTRBL(p): - # the essence of the normalizeTRBL function in Reportlab 2.3 - if not isinstance(p, (tuple, list)): - return (p, p, p, p) - l = len(p) - return tuple(p) + tuple([ p[i-2] for i in range(l, 4) ]) - - -from reportlab.platypus.flowables import Flowable -from reportlab.rl_config import platypus_link_underline -import re -from copy import copy, deepcopy - -import wordaxe -from wordaxe.hyphen import HyphenationPoint, SHY, HyphenatedWord, Hyphenator -from wordaxe.rl.paraparser import ParaParser, NoBrParaParser -from wordaxe.rl.para_fragments import * - -pt = 1 # Points is the base unit in RL - -# This is more or less copied from RL paragraph - -def cleanBlockQuotedText(text,joiner=u' '): - """This is an internal utility which takes triple- - quoted text form within the document and returns - (hopefully) the paragraph the user intended originally.""" - def _lineClean(line): - return u' '.join([x for x in line.strip().split(u' ') if x]) - lines=[_lineClean(line) for line in text.split('\n')] - return joiner.join(line for line in lines if line) - - -def setXPos(tx,dx): - if dx>1e-6 or dx<-1e-6: - tx.setXPos(dx) - - -# This is more or less copied from RL paragraph - -def imgVRange(h,va,fontSize): - '''return bottom,top offsets relative to baseline(0)''' - if va=='baseline': - iyo = 0 - elif va in ('text-top','top'): - iyo = fontSize-h - elif va=='middle': - iyo = fontSize - (1.2*fontSize+h)*0.5 - elif va in ('text-bottom','bottom'): - iyo = fontSize - 1.2*fontSize - elif va=='super': - iyo = 0.5*fontSize - elif va=='sub': - iyo = -0.5*fontSize - elif hasattr(va,'normalizedValue'): - iyo = va.normalizedValue(fontSize) - else: - iyo = va - return iyo,iyo+h - -_56=5./6 -_16=1./6 -def _putFragLine(cur_x, tx, line): - #print "_putFragLine", line - assert isinstance(line, Line) - xs = tx.XtraState - cur_y = xs.cur_y - #print "_putFragLine: xs.cur_y:", xs.cur_y - x0 = tx._x0 - autoLeading = xs.autoLeading - leading = xs.leading - cur_x += xs.leftIndent - dal = autoLeading in ('min','max') - if dal: - if autoLeading=='max': - ascent = max(_56*leading,line.ascent) - descent = max(_16*leading,-line.descent) - else: - ascent = line.ascent - descent = -line.descent - leading = ascent+descent - if tx._leading!=leading: - tx.setLeading(leading) - if dal: - olb = tx._olb - if olb is not None: - xcy = olb-ascent - if tx._oleading!=leading: - cur_y += leading - tx._oleading - if abs(xcy-cur_y)>1e-8: - cur_y = xcy - tx.setTextOrigin(x0,cur_y) - xs.cur_y = cur_y - tx._olb = cur_y - descent - tx._oleading = leading - ws = getattr(tx,'_wordSpace',0) - nSpaces = 0 - - fragments = list(frags_wordaxe_to_reportlab(line.iter_print_frags())) - #print "fragments:", fragments - for frag in fragments: - #print "render %r" % getattr(frag, "text", "--") - f = frag.style - if hasattr(f,'cbDefn'): - #print "render", f - cbDefn = f.cbDefn - kind = cbDefn.kind - if kind=='img': - #draw image cbDefn,cur_y,cur_x - w = cbDefn.width - h = cbDefn.height - txfs = tx._fontsize - if txfs is None: - txfs = xs.style.fontSize - iy0,iy1 = imgVRange(h,cbDefn.valign,txfs) - cur_x_s = cur_x + nSpaces*ws - 
tx._canvas.drawImage(cbDefn.image,cur_x_s,cur_y+iy0,w,h,mask='auto') - cur_x += w - cur_x_s += w - setXPos(tx,cur_x_s-tx._x0) - else: - name = cbDefn.name - if kind=='anchor': - tx._canvas.bookmarkHorizontal(name,cur_x,cur_y+leading) - else: - func = getattr(tx._canvas,name,None) - if not func: - raise AttributeError("Missing %s callback attribute '%s'" % (kind,name)) - func(tx._canvas,kind,cbDefn.label) - if frag is fragments[-1]: - if not tx._fontname: - tx.setFont(xs.style.fontName,xs.style.fontSize) - tx._textOut('',1) - elif kind in ('img','anchor'): - tx._textOut('',1) - else: - cur_x_s = cur_x + nSpaces*ws - if (tx._fontname,tx._fontsize)!=(f.fontName,f.fontSize): - tx._setFont(f.fontName, f.fontSize) - if xs.textColor!=f.textColor: - xs.textColor = f.textColor - tx.setFillColor(f.textColor) - if xs.rise!=f.rise: - xs.rise=f.rise - tx.setRise(f.rise) - text = frag.text - kp = getattr(frag, "kerning_pairs", None) - tx._textOut(text,frag is fragments[-1], kerning_pairs=kp) # cheap textOut - - # Background colors (done like underline) - #print "f:", repr(f) - backColor = getattr(f, "backColor", None) - if xs.backgroundColor != backColor or xs.backgroundFontSize != f.fontSize: - if xs.backgroundColor is not None: - xs.backgrounds.append( (xs.background_x, cur_x_s, xs.backgroundColor, xs.backgroundFontSize) ) - xs.background_x = cur_x_s - xs.backgroundColor = backColor - xs.backgroundFontSize = f.fontSize - # Underline - if not xs.underline and f.underline: - xs.underline = 1 - xs.underline_x = cur_x_s - xs.underlineColor = f.textColor - elif xs.underline: - if not f.underline: - xs.underline = 0 - xs.underlines.append( (xs.underline_x, cur_x_s, xs.underlineColor) ) - xs.underlineColor = None - elif xs.textColor!=xs.underlineColor: - xs.underlines.append( (xs.underline_x, cur_x_s, xs.underlineColor) ) - xs.underlineColor = xs.textColor - xs.underline_x = cur_x_s - if not xs.strike and f.strike: - xs.strike = 1 - xs.strike_x = cur_x_s - xs.strikeColor = f.textColor - elif xs.strike: - if not f.strike: - xs.strike = 0 - xs.strikes.append( (xs.strike_x, cur_x_s, xs.strikeColor) ) - xs.strikeColor = None - elif xs.textColor!=xs.strikeColor: - xs.strikes.append( (xs.strike_x, cur_x_s, xs.strikeColor) ) - xs.strikeColor = xs.textColor - xs.strike_x = cur_x_s - if f.link and not xs.link: - if not xs.link: - xs.link = f.link - xs.link_x = cur_x_s - xs.linkColor = xs.textColor - elif xs.link: - if not f.link: - xs.links.append( (xs.link_x, cur_x_s, xs.link, xs.linkColor) ) - xs.link = None - xs.linkColor = None - elif f.link!=xs.link or xs.textColor!=xs.linkColor: - xs.links.append( (xs.link_x, cur_x_s, xs.link, xs.linkColor) ) - xs.link = f.link - xs.link_x = cur_x_s - xs.linkColor = xs.textColor - txtlen = tx._canvas.stringWidth(text, tx._fontname, tx._fontsize) - # TODO why is this? We already have got the text length!? 
- cur_x += txtlen - nSpaces += text.count(' ') - cur_x_s = cur_x+(nSpaces-1)*ws - if xs.underline: - xs.underlines.append( (xs.underline_x, cur_x_s, xs.underlineColor) ) - if xs.backgroundColor is not None: - xs.backgrounds.append( (xs.background_x, cur_x_s, xs.backgroundColor, xs.backgroundFontSize) ) - if xs.strike: - xs.strikes.append( (xs.strike_x, cur_x_s, xs.strikeColor) ) - if xs.link: - xs.links.append( (xs.link_x, cur_x_s, xs.link,xs.linkColor) ) - if tx._x0!=x0: - setXPos(tx,x0-tx._x0) - -def _drawBullet(canvas, offset, cur_y, bulletText, style): - '''draw a bullet text could be a simple string or a frag list''' - tx2 = canvas.beginText(style.bulletIndent, cur_y+getattr(style,"bulletOffsetY",0)) - tx2.setFont(style.bulletFontName, style.bulletFontSize) - tx2.setFillColor(hasattr(style,'bulletColor') and style.bulletColor or style.textColor) - if isinstance(bulletText,basestring): - tx2.textOut(bulletText) - else: - for f in bulletText: - tx2.setFont(f.fontName, f.fontSize) - tx2.setFillColor(f.textColor) - tx2.textOut(f.text) - - canvas.drawText(tx2) - #AR making definition lists a bit less ugly - #bulletEnd = tx2.getX() - bulletEnd = tx2.getX() + style.bulletFontSize * 0.6 - offset = max(offset,bulletEnd - style.leftIndent) - return offset - -def _handleBulletWidth(bulletText, style, max_widths): - '''work out bullet width and adjust max_widths[0] if neccessary - ''' - if bulletText: - if isinstance(bulletText,basestring): - bulletWidth = pdfmetrics.stringWidth( bulletText, style.bulletFontName, style.bulletFontSize) - else: - #it's a list of fragments - bulletWidth = 0 - for f in bulletText: - bulletWidth = bulletWidth + pdfmetrics.stringWidth(f.text, f.fontName, f.fontSize) - bulletRight = style.bulletIndent + bulletWidth + 0.6 * style.bulletFontSize - indent = style.leftIndent+style.firstLineIndent - if bulletRight > indent: - #..then it overruns, and we have less space available on line 1 - max_widths[0] -= (bulletRight - indent) - -_scheme_re = re.compile('^[a-zA-Z][-+a-zA-Z0-9]+$') -def _doLink(tx,link,rect): - if isinstance(link,unicode): - link = link.encode('utf8') - parts = link.split(':',1) - scheme = len(parts)==2 and parts[0].lower() or '' - if _scheme_re.match(scheme) and scheme!='document': - kind=scheme.lower()=='pdf' and 'GoToR' or 'URI' - if kind=='GoToR': link = parts[1] - tx._canvas.linkURL(link, rect, relative=1, kind=kind) - else: - if link[0]=='#': - link = link[1:] - scheme='' - tx._canvas.linkRect("", scheme!='document' and link or parts[1], rect, relative=1) - -def _do_post_text(tx): - - xs = tx.XtraState - leading = xs.style.leading - autoLeading = xs.autoLeading - - if True: - f = xs.f - if autoLeading=='max': - leading = max(leading,1.2*f.fontSize) - elif autoLeading=='min': - leading = 1.2*f.fontSize - ff = 0.125*f.fontSize - y0 = xs.cur_y - y = y0 - ff - - # Background - ulc = None - for x1,x2,c,fs in xs.backgrounds: - # print "u",x1,x2,c, leading, ff, i, fs - if c!=ulc: - tx._canvas.setFillColor(c) - ulc = c - #tx._canvas.rect(x1, y, x2-x1, fs, fill=1, stroke=0) - tx._canvas.rect(x1, y - ff, x2-x1, fs, fill=1, stroke=0) - xs.backgrounds = [] - xs.background = 0 - xs.backgroundColor = None - xs.backgroundFontSize = None - - # Underline - csc = None - for x1,x2,c in xs.underlines: - if c!=csc: - tx._canvas.setStrokeColor(c) - csc = c - tx._canvas.line(x1, y, x2, y) - xs.underlines = [] - xs.underline=0 - xs.underlineColor=None - - ys = y0 + 2*ff - for x1,x2,c in xs.strikes: - if c!=csc: - tx._canvas.setStrokeColor(c) - csc = c - tx._canvas.line(x1, 
ys, x2, ys) - xs.strikes = [] - xs.strike=0 - xs.strikeColor=None - - yl = y + leading - for x1,x2,link,c in xs.links: - if platypus_link_underline: - if c!=csc: - tx._canvas.setStrokeColor(c) - csc = c - tx._canvas.line(x1, y, x2, y) - _doLink(tx, link, (x1, y, x2, yl)) - xs.links = [] - xs.link=None - xs.linkColor=None - #print "leading:", leading - xs.cur_y -= leading - #print "xs.cur_y:", xs.cur_y - - -def textTransformFrags(frags,style): - tt = style.textTransform - if tt: - tt=tt.lower() - if tt=='lowercase': - tt = unicode.lower - elif tt=='uppercase': - tt = unicode.upper - elif tt=='capitalize': - tt = unicode.title - elif tt=='none': - return - else: - raise ValueError('ParaStyle.textTransform value %r is invalid' % style.textTransform) - n = len(frags) - if n==1: - #single fragment the easy case - frags[0].text = tt(frags[0].text) - elif tt is unicode.title: - pb = True - for f in frags: - t = f.text - if not t: continue - u = t - if u.startswith(u' ') or pb: - u = tt(u) - else: - i = u.find(u' ') - if i>=0: - u = u[:i]+tt(u[i:]) - pb = u.endswith(u' ') - f.text = u - else: - for f in frags: - t = f.text - if not t: continue - f.text = tt(t) - - -# Here follows a clean(er) paragraph implemention - -class Paragraph(Flowable): - "A simple new implementation for Paragraph flowables." - - def __init__(self, text, style, bulletText = None, frags=None, lines=None, caseSensitive=1, encoding='utf-8', keepWhiteSpace=False, textCleaner=cleanBlockQuotedText): - """ - Either text and style or frags must be supplied. - """ - self.caseSensitive = caseSensitive - self.style = style - self.bulletText = bulletText - self.keepWhiteSpace = keepWhiteSpace # TODO: Unterstützen - self._cache = {} - - if text is None: - assert frags is not None or lines is not None - self.frags = frags - if frags is None: - #print id(self), "init with %d lines" % len(lines) - for line in lines: assert isinstance(line, Line) - self._cache['lines'] = lines - self._cache['height'] = sum([line.height for line in lines]) - self._cache['avail'] = True - else: - #print id(self), "init with frags", frags - for frag in frags: assert isinstance(frag, Fragment) - self.frags = frags - else: - #print id(self), "init with text" - assert isinstance(text, basestring) - # parse text - if not isinstance(text, unicode): - text = unicode(text, encoding) - if textCleaner: text = textCleaner(text) - self.frags = list(self.parse(text, style, bulletText)) - self.text = text - - def parse(self, text, style, bulletText): - """ - Use the NoBrParaParser to create a list of words. - Yields StyledWords, StyledSpace and other entries, - but StyledTexts are grouped to StyledWords. - """ - wordFrags = [] - "Use the NoBrParaParser to create a sequence of fragments" - parser = NoBrParaParser() - parser.caseSensitive = self.caseSensitive - style, frag_list, bullet_frag_list = parser.parse(text, style) - if bullet_frag_list: - self.bulletText = bullet_frag_list - textTransformFrags(frag_list, style) - self.style = style - return frags_reportlab_to_wordaxe(frag_list, style) - - def __repr__(self): - if self.frags: - return "%s(frags=%r)" % (self.__class__.__name__, self.frags) - elif 'lines' in self._cache: - return "%s(_lines=%r)" % (self.__class__.__name__, self._cache['lines']) - - - def calcLineHeight(self, line): - """ - Compute the height needed for a given line. - """ - #print "calcLineHeight", self.style.leading - return self.style.leading - # TODO or should this be computed from the frags? 
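A hedged sketch of how platypus drives this Paragraph's wrap/split protocol: the caller proposes a space, wrap reports the size actually used, and split is consulted when the flowable does not fit. It assumes reportlab plus the removed wordaxe.rl package are installed; the "Normal" style and the sample text are illustrative only, and the module path simply mirrors the file layout deleted by this patch:

from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import cm
from wordaxe.rl.NewParagraph import Paragraph

style = getSampleStyleSheet()["Normal"]
para = Paragraph(u"Some <b>formatted</b> text that may need hyphenation.", style)

w, h = para.wrap(6 * cm, 4 * cm)     # size actually used inside the proposed 6x4 cm box
parts = para.split(6 * cm, 1 * cm)   # [] means: move the whole paragraph to the next frame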
- - def wrap(self, availW, availH): - """ - Return the actually used size. - """ - #print id(self), "wrap", availW, availH - avail = self._cache.get('avail') - if (avail is True # paragraph has no frags, only lines - or avail == (availW, availH)): # already wrapped to this size - return availW, self._cache['height'] - else: # needs to be wrapped - style = self.style - leftIndent = style.leftIndent - first_line_width = availW - (leftIndent+style.firstLineIndent) - style.rightIndent - later_widths = availW - leftIndent - style.rightIndent - max_widths = [first_line_width, later_widths] - return self.i_wrap(availW, availH, max_widths) - - def i_wrap(self, availW, availH, max_widths): - """ - Return the height and width that are actually needed. - Note: - This will abort if the text does not fit entirely. - The lines measured so far will be stored in a private - attribute _cache['lines'] (to improve performance). - TODO: Should StyledSpaces be ignored before or after StyledNewLines? - """ - #print id(self), "i_wrap", availW, availH - lines = [] # lines so far - sumHeight = 0 # sum of lines heights so far - lineHeight = 0 # height of current line - width = 0 # width of current line - lineFrags = [] # (flattened) fragments in current line - - _handleBulletWidth(self.bulletText, self.style, max_widths) - - def iter_widths(max_widths=max_widths): - # an iterator that repeats the last element infinitely - for w in max_widths: yield w - while True: yield w - width_iter = iter_widths() - max_width = width_iter.next() - - frags_remaining = self.frags[:] - while frags_remaining: - if sumHeight > availH: - #print "sumHeight > availH, break, lineFrags=%s" % lineFrags - break - frag = frags_remaining.pop(0) - actions = [("ERROR",None)] - w = 0 - if isinstance(frag, StyledNewLine): - actions = [("ADD",frag), ("LINEFEED",None)] - elif hasattr(frag, "width"): - w = frag.width - if width + w > max_width: - # does not fit - #print "does not fit:", frag, width, w, max_width - if isinstance(frag, StyledWord): - # Hyphenation support - act, left, right, spaceWasted \ - = self.findBestSolution(lineFrags, frag, max_width-width, True) - # TODO: for now, always try squeeze - if act == self.OVERFLOW: - actions = [("LINEFEED",None),("PUSH",frag)] - elif act == self.SQUEEZE: - actions = [("ADD",frag)] - elif act == self.HYPHENATE: - setattr(left,"_source", frag) - setattr(right,"_source", frag) - actions = [("ADD",left),("LINEFEED",None),("PUSH",right)] - else: - raise AssertionError - else: - actions = [("LINEFEED",None),("PUSH",frag)] - else: - # will fit into current line - actions = [("ADD",frag)] - else: - # Some Meta Fragment - action = ("ADD",frag) - for (act,afrag) in actions: - #print act, width - if act == "LINEFEED": - if not self.keepWhiteSpace: - # ignore space at the end of the line for the - # width calculation - for f in reversed(lineFrags): - if isinstance(f, StyledSpace) or not f.width: - width -= f.width - if width <= 0: - width = 0 - else: - break - #print act, - lineHeight = self.style.leading # TODO correct height calculation - #print lineHeight, - baseline = 0 # TODO correct baseline calculation - line = Line(lineFrags, width, lineHeight, baseline, max_width - width, self.keepWhiteSpace) - lines.append(line) - lineFrags = [] - width = 0 - max_width = width_iter.next() - sumHeight += lineHeight - #print sumHeight - elif act == "IGNORE": - pass - elif act == "ADD": - lineFrags.append(afrag) - # ignore space at the start of the line for the - # width calculation - if not self.keepWhiteSpace \ - and 
not width and isinstance(afrag, StyledSpace): - pass - else: - width += getattr(afrag, "width", 0) - elif act == "PUSH": - frags_remaining.insert(0, afrag) - else: - raise AssertionError("Action:%r Frag:" % (act,afrag)) - else: - # Everything did fit - lineHeight = self.calcLineHeight(lineFrags) - # TODO: Why here calcLineHeight(), instead of self.style.leading as above? - baseline = 0 # TODO correct baseline calculation - if not lineFrags and lines: - pass # Ignore the final line if it's empty and there are already lines. - else: - # ignore space at the end of the line for the - # width calculation - if not self.keepWhiteSpace: - for f in reversed(lineFrags): - if isinstance(f, StyledSpace) or not f.width: - width -= f.width - - # workaround for tracker item id 2741874: Assert on Paragraph with para tags - # at least the code now looks the same as in the "LINEFEED" case. - if width < 0: - width = 0 - else: - break - line = Line(lineFrags, width, lineHeight, baseline, max_width - width, self.keepWhiteSpace) - lines.append(line) - lineFrags = [] - width = 0 - sumHeight += lineHeight - - self.width = availW - self.height = sumHeight - if sumHeight > availH: - #print id(self), "needs splitting" - #print "lines[-1]:", lines[-1] - #print "frags_remaining:", frags_remaining - # don't store the last line (it does not fit) - # TODO perhaps we have to insert a Linefeed here? - # v - assert not lineFrags, lineFrags - assert lines - unused = lines.pop().fragments - if frags_remaining: - next = frags_remaining[0] - src = getattr(next, "_source", None) - if src is not None: - # next is the right part of a hyphenation - left = unused[-1] - assert getattr(left,"_source") == src - unused[-1] = src - frags_remaining.pop(0) - unused += frags_remaining - assert len(unused) == len(set(unused)) - self.height -= lineHeight - #print "%d lines, lineHeight=%f" % (len(lines), lineHeight) - #print "in wrap: self.height=%f" % (self.height) - #print "self.frags=%s" % self.frags - #if len(lines)==2: - # print lines - else: - #print id(self), "fits" - unused = [] - assert self.height <= availH, (id(self), self.height, availH) - self._cache['lines'] = lines - self._cache['unused'] = unused - self._cache['avail'] = (availW, availH) - self._cache['height'] = sumHeight - #print "i_wrap returns", availW, sumHeight - return availW, sumHeight - - def split(self, availWidth, availHeight): - """ - Split the paragraph into two - """ - #print id(self), "split", availWidth, availHeight - - if availWidth <= 0 or availHeight <= 0: - # cannot split if no space available - return [] - - if 'avail' not in self._cache: - # paragraph has not yet been wrapped - self.wrap(availWidth, availHeight) - - lines = self._cache['lines'] - #print "lines:", lines - unused = self._cache['unused'] - #print "unused:", unused - if len(lines) < 1: # minimum widow rows - #print "split with lines == []" - # Put everything on the next frame - assert self.frags is not None - del self._cache['avail'] - return [] - elif not unused: - # Everything fits on this page - #print "everything fits." 
- return [self] - else: - style = self.style - # height/leading computation - autoLeading = getattr(self,'autoLeading',getattr(style,'autoLeading','')) - leading = style.leading - if autoLeading not in ('','off'): - s = height = 0 - if autoLeading=='max': - for i,l in enumerate(lines): - h = max(l.ascent-l.descent,leading) - n = height+h - if n>availHeight+1e-8: - break - height = n - s = i+1 - elif autoLeading=='min': - for i,l in enumerate(lines): - n = height+l.ascent-l.descent - if n>availHeight+1e-8: - break - height = n - s = i+1 - else: - raise ValueError('invalid autoLeading value %r' % autoLeading) - else: - l = leading - if autoLeading=='max': - l = max(leading,1.2*style.fontSize) - elif autoLeading=='min': - l = 1.2*style.fontSize - s = int(availHeight/l) - height = s*l - - # Widows/orphans control - # There's some disagreement about definitions of widows and orphans. - # We use the definitions from Wikipedia and the Chicago Manual of Style. - # - # Note: - # We cannot control something like "minimum widow lines", - # since we have not yet computed the lines for the second part. - # Thus we can only support allowOrphans without additional overhead. - n = len(lines) - allowWidows = getattr(style,'allowWidows',1) - allowOrphans = getattr(style,'allowOrphans',0) - #print "allowOrphans:", allowOrphans - if not allowOrphans: - if s <= 1: #orphan? - del self._cache['avail'] - #print "orphans not allowed => return []" - return [] - if False and not allowWidows: - # NOT SUPPORTED - if n==s+1: #widow? - if (allowOrphans and n==3) or n>3: - s -= 1 #give the widow some company - else: - #no room for adjustment; force the whole para onwards - del self._cache['avail'] - return [] - first = self.__class__(text=None, style=self.style, bulletText=self.bulletText, lines=lines, caseSensitive=self.caseSensitive) - first.width = self.width # TODO 20080911 - first.height = self.height - first._JustifyLast = 1 - if style.firstLineIndent != 0 or not allowOrphans: - style = deepcopy(style) - style.firstLineIndent = 0 - style.allowOrphans = 1 - - # I guess the right place to implement allowWidows is somewhere here: - # We'd have test if the second paragraph consists of one line or more. - # If it's only one line, then we'd have to cut off the last line of the - # first paragraph and move the text on to second paragraph. - - second = self.__class__(text=None, style=style, bulletText=None, frags=unused, caseSensitive=self.caseSensitive) - #print "first id=%d height=%f" % (id(first), first.height) - #print "secnd id=%d" % id(second) - return [first, second] - - - def beginText(self, x, y): - return self.canv.beginText(x, y) - - def draw(self, debug=0): - """ - Draw the paragraph. - """ - #print id(self), "draw" - - # Code more or less copied from RL - - """Draws a paragraph according to the given style. - Returns the final y position at the bottom. Not safe for - paragraphs without spaces e.g. 
Japanese; wrapping - algorithm will go infinite.""" - - #stash the key facts locally for speed - canvas = self.canv - style = self.style - lines = self._cache['lines'] - leading = style.leading - autoLeading = getattr(self,'autoLeading',getattr(style,'autoLeading','')) - - #work out the origin for line 1 - leftIndent = style.leftIndent - cur_x = leftIndent - - if debug: - bw = 0.5 - bc = Color(1,1,0) - bg = Color(0.9,0.9,0.9) - else: - bw = getattr(style,'borderWidth',None) - bc = getattr(style,'borderColor',None) - bg = style.backColor - - #if has a background or border, draw it - if bg or (bc and bw): - canvas.saveState() - op = canvas.rect - kwds = dict(fill=0,stroke=0) - if bc and bw: - canvas.setStrokeColor(bc) - canvas.setLineWidth(bw) - kwds['stroke'] = 1 - br = getattr(style,'borderRadius',0) - if br and not debug: - op = canvas.roundRect - kwds['radius'] = br - if bg: - canvas.setFillColor(bg) - kwds['fill'] = 1 - bp = getattr(style,'borderPadding',0) - tbp, rbp, bbp, lbp = normalizeTRBL(bp) - - op(leftIndent - lbp, - -bbp, - self.width - (leftIndent+style.rightIndent) + lbp+rbp, - self.height + tbp+bbp, - **kwds) - canvas.restoreState() - - #print "Lines: %s" % lines - nLines = len(lines) - #print "len(lines)", nLines - bulletText = self.bulletText - if nLines > 0: - _offsets = getattr(self,'_offsets',[0]) - _offsets += (nLines-len(_offsets))*[_offsets[-1]] - canvas.saveState() - #canvas.addLiteral('%% %s.drawPara' % _className(self)) - alignment = style.alignment - offset = style.firstLineIndent+_offsets[0] - lim = nLines-1 - noJustifyLast = not (hasattr(self,'_JustifyLast') and self._JustifyLast) - f = lines[0] - #cur_y = self.height - getattr(f,'ascent',f.fontSize) - cur_y = sum([line.height for line in lines]) - f.ascent - - # default? - dpl = self._leftDrawParaLineX - if bulletText: - oo = offset - offset = _drawBullet(canvas,offset,cur_y,bulletText,style) - if alignment == TA_LEFT: - dpl = self._leftDrawParaLineX - elif alignment == TA_CENTER: - dpl = self._centerDrawParaLineX - elif self.style.alignment == TA_RIGHT: - dpl = self._rightDrawParaLineX - elif self.style.alignment == TA_JUSTIFY: - dpl = self._justifyDrawParaLineX - else: - raise ValueError("bad align %s" % repr(alignment)) - - #set up the font etc. - tx = self.beginText(cur_x, cur_y) - xs = tx.XtraState=ABag() - xs.textColor=None - xs.rise=0 - xs.underline=0 - xs.underlines=[] - xs.underlineColor=None - xs.backgrounds = [] - xs.backgroundColor = None - xs.backgroundFontSize = None - xs.strike=0 - xs.strikes=[] - xs.strikeColor=None - xs.links=[] - xs.link=None - xs.leading = style.leading - xs.leftIndent = leftIndent - tx._leading = None - tx._olb = None - xs.cur_y = cur_y - xs.f = f - xs.style = style - xs.autoLeading = autoLeading - - tx._fontname,tx._fontsize = None, None - dpl( tx, offset, lines[0], noJustifyLast and nLines==1) - _do_post_text(tx) - - #now the middle of the paragraph, aligned with the left margin which is our origin. 
- for i in xrange(1, nLines): - f = lines[i] - dpl( tx, _offsets[i], f, noJustifyLast and i==lim) - _do_post_text(tx) - - canvas.drawText(tx) - canvas.restoreState() - - def _leftDrawParaLineX( self, tx, offset, line, last=0): - if line.space_wasted < 0: - return self._justifyDrawParaLineX(tx,offset,line,last) - setXPos(tx,offset) - _putFragLine(offset, tx, line) - setXPos(tx,-offset) - - def _rightDrawParaLineX( self, tx, offset, line, last=0): - if line.space_wasted < 0: - return self._justifyDrawParaLineX(tx,offset,line,last) - m = offset + line.space_wasted - setXPos(tx,m) - _putFragLine(m, tx, line) - setXPos(tx,-m) - - def _centerDrawParaLineX( self, tx, offset, line, last=0): - if line.space_wasted < 0: - return self._justifyDrawParaLineX(tx,offset,line,last) - m = offset + 0.5 * line.space_wasted - setXPos(tx, m) - _putFragLine(m, tx, line) - setXPos(tx,-m) - - def _justifyDrawParaLineX( self, tx, offset, line, last=0): - setXPos(tx,offset) - frags = line.fragments[:] - while frags and (not frags[0].width or isinstance(frags[0], StyledSpace)): - frags.pop(0) - while frags and (not frags[-1].width or isinstance(frags[-1], StyledSpace)): - frags.pop() - - nSpaces = sum([len(frag.text) for frag in frags if isinstance(frag, StyledSpace)]) - # TODO: if !nSpaces use txt.setCharSpace instead - if last or not nSpaces or abs(line.space_wasted)<=1e-8 or isinstance(frags[-1], StyledNewLine): - _putFragLine(offset, tx, line) #no space modification - else: - tx.setWordSpace(line.space_wasted / float(nSpaces)) - _putFragLine(offset, tx, line) - tx.setWordSpace(0) - setXPos(tx,-offset) - - class OVERFLOW: - pass - class SQUEEZE: - pass - class HYPHENATE: - pass - - def rateHyph(self, base_penalty, frags, word, space_remaining): - """Rate a possible hyphenation point""" - #### The rating could be wrong, in particular if space_remaining is too small! 
- #print "rateHyph frags=%s, word=%r, space_remaining=%d" % (frags,word, space_remaining) - # All the factors used here are just a wild guess - spaces_width = sum([frag.width for frag in frags if isinstance(frag, StyledSpace)]) - if spaces_width: - stretch = space_remaining/spaces_width - if stretch<0: - stretch_penalty = stretch*stretch*stretch*stretch*5000 - else: - stretch_penalty = stretch*stretch*30 - else: # HVB 20060907: Not a single space so far - if space_remaining > 0: - # TODO this should be easier - lst = [(len(frag.text), frag,width) for frag in frags if hasattr(frag,"text")] - sum_len = sum([x[0] for x in lst]) - sum_width=sum([x[1] for x in lst]) - if sum_len > 0: - avg_char_width = sum_width / sum_len - stretch_penalty = space_remaining/avg_char_width*20 - else: - stretch_penalty = space_remaining*60 - else: - stretch_penalty = 20000 - rating = 16384 - base_penalty - stretch_penalty - #print " rating:", rating - return rating - - # finding bestSolution where the word uses possibly several different font styles - # (action,left,right,spaceWasted) = self.findBestSolution(frags,w,currentWidth,maxWidth,windxquality: - #print "SQZ" - bestSolution = (self.SQUEEZE, word, None, space_remaining - word.width) - quality = q - # try HYPHENATE - for hp in word.text.hyphenations: - left,right = word.splitAt(hp) - #print "left=%r right=%r" % (left, right) - q = self.rateHyph(100-10*hp.quality,frags,left,space_remaining - left.width) - if q>quality: - bestSolution = (self.HYPHENATE, left, right, space_remaining - left.width) - quality = q - if bestSolution[0] is self.OVERFLOW and not nwords: - # We have to make a hard break in the word - #print "FORCE Hyphenation" - # force at least a single character into this line - if not word.fragments: - # this might happen in the degenerated case SW() - bestSolution = (self.SQUEEZE, word, None, space_remaining - word.width) - else: - left, right = word.splitAt(HyphenationPoint(1,1,0,"",0,"")) - bestSolution = (self.HYPHENATE, left, right, 0) - for p in range(1,len(word.text)): - if word.text[p-1] not in ["-",SHY]: - r = SHY - else: - r = "" - left,right = word.splitAt(HyphenationPoint(p,1,0,r,0,"")) - if left.width <= space_remaining: - bestSolution = (self.HYPHENATE, left, right, space_remaining - left.width) - else: - # does not fit anymore - break - - #print "bestSolution for", word, "returns:", HVBDBG.s(bestSolution) - return bestSolution - - - def getPlainText(self,identify=None): - """Convenience function for templates which want access - to the raw text, without XML tags. - - Note: will only get the first part if a paragraph is splitted. - This is not perfect, but should work good enough to be used for the TOC. 
- """ - text = [] - lines = self._cache.get('lines') - if lines is not None: - for line in lines: - if line is not lines[0]: - text.append(" ") - for frag in line.fragments: - if hasattr(frag, "text"): - text.append(getattr(frag, "text")) - else: - for frag in self.frags: - if hasattr(frag, "text"): - text.append(getattr(frag, "text")) - return "".join(text) - - def minWidth(self): - """Attempt to determine a minimum sensible width""" - if self.frags: - return max([frag.width for frag in self.frags]) - return 0 - -class ParagraphAndImage(Flowable): - '''combine a Paragraph and an Image''' - def __init__(self,P,I,xpad=3,ypad=3,side='right'): - self.P = P - self.I = I - self.xpad = xpad - self.ypad = ypad - self._side = side - - def getSpaceBefore(self): - return max(self.P.getSpaceBefore(),self.I.getSpaceBefore()) - - def getSpaceAfter(self): - return max(self.P.getSpaceAfter(),self.I.getSpaceAfter()) - - def wrap(self,availWidth,availHeight): - wI, hI = self.I.wrap(availWidth,availHeight) - self.wI = wI - self.hI = hI - # work out widths array for breaking - self.width = availWidth - P = self.P - style = P.style - xpad = self.xpad - ypad = self.ypad - leading = style.leading - leftIndent = style.leftIndent - later_widths = availWidth - leftIndent - style.rightIndent - intermediate_widths = later_widths - xpad - wI - first_line_width = intermediate_widths - style.firstLineIndent - P.width = 0 - nIW = int((hI+ypad)/leading) - - if 'avail' in P._cache: - ph = P.height - else: - max_widths = [first_line_width] + nIW*[intermediate_widths] + [later_widths] - pw, ph = P.i_wrap(availWidth, availHeight, max_widths) - if self._side=='left': - self._offsets = [wI+xpad]*(1+nIW)+[0] - self.height = max(hI,ph) - return (self.width, self.height) - - def split(self,availWidth, availHeight): - P, wI, hI, ypad = self.P, self.wI, self.hI, self.ypad - if hI+ypad>availHeight or len(P.frags)<=0: return [] - S = P.split(availWidth,availHeight) - #print S - if not S: return S - P = self.P = S[0] - del S[0] - style = P.style - #P.height = len(self.P.blPara.lines)*style.leading - self.height = max(hI,P.height) - return [self]+S - - def draw(self): - canv = self.canv - if self._side=='left': - self.I.drawOn(canv,0,self.height-self.hI) - self.P._offsets = self._offsets - try: - self.P.drawOn(canv,0,0) - finally: - del self.P._offsets - else: - self.I.drawOn(canv,self.width-self.wI-self.xpad,self.height-self.hI) - self.P.drawOn(canv,0,0) - -# Monkey patch Reportlab textobject -from reportlab.lib.utils import fp_str -from reportlab.pdfbase import pdfmetrics - -def kerning_formatText(self, text, kerning_pairs=None): - "Generates PDF text output operator(s)" - #print "_formatText", text, kerning_pairs - canv = self._canvas - font = pdfmetrics.getFont(self._fontname) - R = [] - if font._dynamicFont: - #it's a truetype font and should be utf8. If an error is raised, - for subset, t in font.splitString(text, canv._doc): - if subset!=self._curSubset: - pdffontname = font.getSubsetInternalName(subset, canv._doc) - R.append("%s %s Tf %s TL" % (pdffontname, fp_str(self._fontsize), fp_str(self._leading))) - self._curSubset = subset - if kerning_pairs is None: - R.append("(%s) Tj" % canv._escape(t)) - else: - # Take kerning into account - # TODO performance tuning possible? - R.append("[") - buf = t[0] - for i in range(len(t)-1): - if kerning_pairs[i]: - R.append(" (%s)" % canv._escape(buf)) - R.append(" %s" % fp_str(-kerning_pairs[i])) # TODO scaling! 
- buf = "" - buf += t[i+1] - if buf: - R.append(" (%s)" % canv._escape(buf)) - R.append("] TJ") - elif font._multiByte: - #all the fonts should really work like this - let them know more about PDF... - R.append("%s %s Tf %s TL" % ( - canv._doc.getInternalFontName(font.fontName), - fp_str(self._fontsize), - fp_str(self._leading) - )) - R.append("(%s) Tj" % font.formatForPdf(text)) - else: - #convert to T1 coding - fc = font - if not isinstance(text,unicode): - try: - text = text.decode('utf8') - except UnicodeDecodeError,e: - i,j = e.args[2:4] - raise UnicodeDecodeError(*(e.args[:4]+('%s\n%s-->%s<--%s' % (e.args[4],text[max(i-10,0):i],text[i:j],text[j:j+10]),))) - - for f, t in pdfmetrics.unicode2T1(text,[font]+font.substitutionFonts): - if f!=fc: - R.append("%s %s Tf %s TL" % (canv._doc.getInternalFontName(f.fontName), fp_str(self._fontsize), fp_str(self._leading))) - fc = f - R.append("(%s) Tj" % canv._escape(t)) - if font!=fc: - R.append("%s %s Tf %s TL" % (canv._doc.getInternalFontName(self._fontname), fp_str(self._fontsize), fp_str(self._leading))) - return ' '.join(R) - -def kerning_textOut(self, text, TStar=0, kerning_pairs=None): - "prints string at current point, ignores text cursor" - self._code.append('%s%s' % (self._formatText(text, kerning_pairs), (TStar and ' T*' or ''))) - -from reportlab.pdfgen.textobject import PDFTextObject -import new -PDFTextObject._textOut = new.instancemethod(kerning_textOut, None, PDFTextObject) -PDFTextObject._formatText = new.instancemethod(kerning_formatText, None, PDFTextObject) - -# from here on, only test code... - -class HVBDBG: - @staticmethod - def s(obj): - if type(obj) == list: - return "[" + ", ".join([HVBDBG.s(x) for x in obj]) + "]" - elif type(obj) == tuple: - return "(" + ", ".join([HVBDBG.s(x) for x in obj]) + ")" - elif isinstance(obj, ABag): - return "ABag(.text=%r)" % obj.text - elif type(obj) == float: - return "%1.2f" % obj - else: - return repr(obj) - -if __name__ == "__main__": - - - # Test - import styles - styleSheet = styles.getSampleStyleSheet() - style = styleSheet["Normal"] - #text = "Der blaue
Klaus" - #p = Paragraph(text, style) - #print "width=%f" % sum([f.width for f in p.frags if hasattr(f,"width")]) - #print "p=%r" % p - - #p = Paragraph("jetzt auch bunt", style) - #frags = p.frags - #print repr(frags) - #print repr(frags[-1].fragments[0].style) - - import os - import sys - import unittest - - from reportlab.lib.units import cm - from reportlab.lib import pagesizes - from reportlab.lib.styles import getSampleStyleSheet - from reportlab.platypus import Frame, PageTemplate, BaseDocTemplate - - USE_HYPHENATION = True - - if USE_HYPHENATION: - import wordaxe.rl.styles - from wordaxe.DCWHyphenator import DCWHyphenator - wordaxe.hyphRegistry['DE'] = DCWHyphenator('DE', 5) - - PAGESIZE = pagesizes.landscape(pagesizes.A4) - - class TwoColumnDocTemplate(BaseDocTemplate): - "Define a simple, two column document." - - def __init__(self, filename, **kw): - m = 2*cm - cw, ch = (PAGESIZE[0]-2*m)/2., (PAGESIZE[1]-2*m) - f1 = Frame(m, m+0.5*cm, cw-0.75*cm, ch-1*cm, id='F1', - leftPadding=0, topPadding=0, rightPadding=0, bottomPadding=0, - showBoundary=True - ) - f2 = Frame(cw+2.7*cm, m+0.5*cm, cw-0.75*cm, ch-1*cm, id='F2', - leftPadding=0, topPadding=0, rightPadding=0, bottomPadding=0, - showBoundary=True - ) - apply(BaseDocTemplate.__init__, (self, filename), kw) - template = PageTemplate('template', [f1, f2]) - self.addPageTemplates(template) - - def test(): - from reportlab.platypus.paragraph import Paragraph as platypus_Paragraph - from wordaxe.DCWHyphenator import DCWHyphenator - wordaxe.hyphRegistry["DE"] = DCWHyphenator("DE") - stylesheet = getSampleStyleSheet() - for indx, klass in enumerate([Paragraph, platypus_Paragraph]): - normal = stylesheet['BodyText'] - normal.fontName = "Helvetica" - normal.fontSize = 12 - normal.leading = 16 - if klass is Paragraph: - normal.language = 'DE' - normal.hyphenation = True - normal.alignment = TA_JUSTIFY - normal.firstLineIndent = 15*pt - normal.leftIndent = 20*pt - - text = """Bedauerlicherweise ist ein Donaudampfschiffkapitän auch nur ein Dampfschiffkapitän.""" - # strange behaviour when next line uncommented - text = " ".join(['%s' % w for w in text.split()]) - - text="""Das jeweils aktuelle Release der Software kann aber von der entsprechenden - SourceForge Download-Seite - heruntergeladen werden. Die allerneueste in Entwicklung befindliche Version - wird im Sourceforge Subversion-Repository verwaltet. - """.replace("\n"," ") - - story = [] - #story.append(Paragraph(text, style=normal)) - story.append(klass(u"Eine Aufzählung, bei der der Text hoffentlich etwas länger als eine Zeile ist.", style=normal, bulletText="\xe2\x80\xa2")) - #story.append(klass(u"Silbentrennungsverfahren helfen dabei, extrem lange Donaudampfschiffe in handliche Schiffchen aufzuteilen. " * 10, style=normal)) - #story.append(klass(u"Silbentrennungsverfahren helfen dabei, extrem lange Donaudampfschiffe in handliche Schiffchen aufzuteilen.", style=normal, bulletText="\xe2\x80\xa2")) - doc = TwoColumnDocTemplate(("test_NewParagraph_%d.pdf" %indx), pagesize=PAGESIZE) - doc.build(story) - - test() - diff --git a/dist-packages/wordaxe/wordaxe/rl/__init__.py b/dist-packages/wordaxe/wordaxe/rl/__init__.py deleted file mode 100755 index 75288316c..000000000 --- a/dist-packages/wordaxe/wordaxe/rl/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: iso-8859-1 -*- - -__license__=""" - Copyright 2004-2008 Henning von Bargen (henning.vonbargen arcor.de) - This software is dual-licenced under the Apache 2.0 and the - 2-clauses BSD license. 
For details, see license.txt -""" - -__version__=''' $Id: __init__.py,v 1.2 2004/05/31 22:22:12 hvbargen Exp $ ''' -__doc__='Hyphenation support for ReportLab' - -USE_NEW_PARAGRAPH = True diff --git a/dist-packages/wordaxe/wordaxe/rl/graphdocpy.py b/dist-packages/wordaxe/wordaxe/rl/graphdocpy.py deleted file mode 100755 index 1d1d32be1..000000000 --- a/dist-packages/wordaxe/wordaxe/rl/graphdocpy.py +++ /dev/null @@ -1,984 +0,0 @@ -#!/usr/bin/env python -#Copyright ReportLab Europe Ltd. 2000-2004 -#see license.txt for license details -#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/tools/docco/graphdocpy.py - -"""Generate documentation for reportlab.graphics classes. - -Type the following for usage info: - - python graphdocpy.py -h -""" - - -__version__ = '0.8' - - -import sys -sys.path.insert(0, '.') -import os, re, types, string, getopt, pickle, copy, time, pprint, traceback -from string import find, join, split, replace, expandtabs, rstrip -import reportlab -from reportlab import rl_config - -from docpy import PackageSkeleton0, ModuleSkeleton0 -from docpy import DocBuilder0, PdfDocBuilder0, HtmlDocBuilder0 -from docpy import htmlescape, htmlrepr, defaultformat, \ - getdoc, reduceDocStringLength -from docpy import makeHtmlSection, makeHtmlSubSection, \ - makeHtmlInlineImage - -from reportlab.lib.units import inch, cm -from reportlab.lib.pagesizes import A4 -from reportlab.lib import colors -from reportlab.lib.enums import TA_CENTER, TA_LEFT -from reportlab.lib.utils import getStringIO -#from StringIO import StringIO -#getStringIO=StringIO -# HVB: Changed für Hyphenation support -from wordaxe.rl.styles import getSampleStyleSheet, ParagraphStyle -from reportlab.pdfgen import canvas -from reportlab.platypus.flowables import Flowable, Spacer -# HVB: Changed für Hyphenation support -from wordaxe.rl.paragraph import Paragraph -from reportlab.platypus.tableofcontents import TableOfContents -from reportlab.platypus.flowables \ - import Flowable, Preformatted,Spacer, Image, KeepTogether, PageBreak -from reportlab.platypus.xpreformatted import XPreformatted -from reportlab.platypus.frames import Frame -from reportlab.platypus.doctemplate \ - import PageTemplate, BaseDocTemplate -from reportlab.platypus.tables import TableStyle, Table -from reportlab.graphics.shapes import NotImplementedError -import inspect - -# Needed to draw Widget/Drawing demos. - -from reportlab.graphics.widgetbase import Widget -from reportlab.graphics.shapes import Drawing -from reportlab.graphics import shapes -from reportlab.graphics import renderPDF - -VERBOSE = rl_config.verbose -VERIFY = 1 - -_abstractclasserr_re = re.compile(r'^\s*abstract\s*class\s*(\w+)\s*instantiated',re.I) - -#################################################################### -# -# Stuff needed for building PDF docs. -# -#################################################################### - -def mainPageFrame(canvas, doc): - "The page frame used for all PDF documents." 
- - canvas.saveState() - - pageNumber = canvas.getPageNumber() - canvas.line(2*cm, A4[1]-2*cm, A4[0]-2*cm, A4[1]-2*cm) - canvas.line(2*cm, 2*cm, A4[0]-2*cm, 2*cm) - if pageNumber > 1: - canvas.setFont('Times-Roman', 12) - canvas.drawString(4 * inch, cm, "%d" % pageNumber) - if hasattr(canvas, 'headerLine'): # hackish - # HVB: Changed für Hyphenation support - # headerline = string.join(canvas.headerLine, ' \xc2\x8d ') - headerline = string.join(canvas.headerLine, ' \xc2\x8d '.decode('utf8')) - canvas.drawString(2*cm, A4[1]-1.75*cm, headerline) - - canvas.setFont('Times-Roman', 8) - msg = "Generated with docpy. See http://www.reportlab.com!" - canvas.drawString(2*cm, 1.65*cm, msg) - - canvas.restoreState() - - -class MyTemplate(BaseDocTemplate): - "The document template used for all PDF documents." - - _invalidInitArgs = ('pageTemplates',) - - def __init__(self, filename, **kw): - frame1 = Frame(2.5*cm, 2.5*cm, 15*cm, 25*cm, id='F1') - self.allowSplitting = 0 - apply(BaseDocTemplate.__init__, (self, filename), kw) - self.addPageTemplates(PageTemplate('normal', [frame1], mainPageFrame)) - - def afterFlowable(self, flowable): - "Takes care of header line, TOC and outline entries." - - if flowable.__class__.__name__ == 'Paragraph': - f = flowable - - # Build a list of heading parts. - # So far, this is the *last* item on the *previous* page... - if f.style.name[:8] == 'Heading0': - self.canv.headerLine = [f.text] # hackish - elif f.style.name[:8] == 'Heading1': - if len(self.canv.headerLine) == 2: - del self.canv.headerLine[-1] - elif len(self.canv.headerLine) == 3: - del self.canv.headerLine[-1] - del self.canv.headerLine[-1] - self.canv.headerLine.append(f.text) - elif f.style.name[:8] == 'Heading2': - if len(self.canv.headerLine) == 3: - del self.canv.headerLine[-1] - self.canv.headerLine.append(f.text) - - if f.style.name[:7] == 'Heading': - # Register TOC entries. - headLevel = int(f.style.name[7:]) - self.notify('TOCEntry', (headLevel, flowable.getPlainText(), self.page)) - - # Add PDF outline entries. - c = self.canv - title = f.text - key = str(hash(f)) - lev = int(f.style.name[7:]) - try: - if lev == 0: - isClosed = 0 - else: - isClosed = 1 - c.bookmarkPage(key) - c.addOutlineEntry(title, key, level=lev, closed=isClosed) - c.showOutline() - except: - if VERBOSE: - # AR hacking in exception handlers - print 'caught exception in MyTemplate.afterFlowable with heading text %s' % f.text - traceback.print_exc() - else: - pass - - -#################################################################### -# -# Utility functions -# -#################################################################### -def indentLevel(line, spacesPerTab=4): - """Counts the indent levels on the front. - - It is assumed that one tab equals 4 spaces. 
- """ - - x = 0 - nextTab = 4 - for ch in line: - if ch == ' ': - x = x + 1 - elif ch == '\t': - x = nextTab - nextTab = x + spacesPerTab - else: - return x - - -assert indentLevel('hello') == 0, 'error in indentLevel' -assert indentLevel(' hello') == 1, 'error in indentLevel' -assert indentLevel(' hello') == 2, 'error in indentLevel' -assert indentLevel(' hello') == 3, 'error in indentLevel' -assert indentLevel('\thello') == 4, 'error in indentLevel' -assert indentLevel(' \thello') == 4, 'error in indentLevel' -assert indentLevel('\t hello') == 5, 'error in indentLevel' - -#################################################################### -# -# Special-purpose document builders -# -#################################################################### - -class GraphPdfDocBuilder0(PdfDocBuilder0): - """A PDF document builder displaying widgets and drawings. - - This generates a PDF file where only methods named 'demo' are - listed for any class C. If C happens to be a subclass of Widget - and has a 'demo' method, this method is assumed to generate and - return a sample widget instance, that is then appended graphi- - cally to the Platypus story. - - Something similar happens for functions. If their names start - with 'sample' they are supposed to generate and return a sample - drawing. This is then taken and appended graphically to the - Platypus story, as well. - """ - - fileSuffix = '.pdf' - - def begin(self, name='', typ=''): - styleSheet = getSampleStyleSheet() - self.code = styleSheet['Code'] - self.bt = styleSheet['BodyText'] - self.story = [] - - # Cover page - t = time.gmtime(time.time()) - timeString = time.strftime("%Y-%m-%d %H:%M", t) - self.story.append(Paragraph('Documentation for %s "%s"' % (typ, name), self.bt)) - self.story.append(Paragraph('Generated by: graphdocpy.py version %s' % __version__, self.bt)) - self.story.append(Paragraph('Date generated: %s' % timeString, self.bt)) - self.story.append(Paragraph('Format: PDF', self.bt)) - self.story.append(PageBreak()) - - # Table of contents - toc = TableOfContents() - self.story.append(toc) - self.story.append(PageBreak()) - - - def end(self, fileName=None): - if fileName: # overrides output path - self.outPath = fileName - elif self.packageName: - self.outPath = self.packageName + self.fileSuffix - elif self.skeleton: - self.outPath = self.skeleton.getModuleName() + self.fileSuffix - else: - self.outPath = '' - - if self.outPath: - doc = MyTemplate(self.outPath) - doc.multiBuild(self.story) - - - def beginModule(self, name, doc, imported): - story = self.story - bt = self.bt - - # Defer displaying the module header info to later... - self.shouldDisplayModule = (name, doc, imported) - self.hasDisplayedModule = 0 - - - def endModule(self, name, doc, imported): - if self.hasDisplayedModule: - DocBuilder0.endModule(self, name, doc, imported) - - - def beginClasses(self, names): - # Defer displaying the module header info to later... - if self.shouldDisplayModule: - self.shouldDisplayClasses = names - - - # Skip all methods. - def beginMethod(self, name, doc, sig): - pass - - - def endMethod(self, name, doc, sig): - pass - - - def beginClass(self, name, doc, bases): - "Append a graphic demo of a Widget or Drawing at the end of a class." - - if VERBOSE: - print 'GraphPdfDocBuilder.beginClass(%s...)' % name - - aClass = eval('self.skeleton.moduleSpace.' 
+ name) - if issubclass(aClass, Widget): - if self.shouldDisplayModule: - modName, modDoc, imported = self.shouldDisplayModule - self.story.append(Paragraph(modName, self.makeHeadingStyle(self.indentLevel-2, 'module'))) - self.story.append(XPreformatted(modDoc, self.bt)) - self.shouldDisplayModule = 0 - self.hasDisplayedModule = 1 - if self.shouldDisplayClasses: - self.story.append(Paragraph('Classes', self.makeHeadingStyle(self.indentLevel-1))) - self.shouldDisplayClasses = 0 - PdfDocBuilder0.beginClass(self, name, doc, bases) - self.beginAttributes(aClass) - - elif issubclass(aClass, Drawing): - if self.shouldDisplayModule: - modName, modDoc, imported = self.shouldDisplayModule - self.story.append(Paragraph(modName, self.makeHeadingStyle(self.indentLevel-2, 'module'))) - self.story.append(XPreformatted(modDoc, self.bt)) - self.shouldDisplayModule = 0 - self.hasDisplayedModule = 1 - if self.shouldDisplayClasses: - self.story.append(Paragraph('Classes', self.makeHeadingStyle(self.indentLevel-1))) - self.shouldDisplayClasses = 0 - PdfDocBuilder0.beginClass(self, name, doc, bases) - - - def beginAttributes(self, aClass): - "Append a list of annotated attributes of a class." - - self.story.append(Paragraph( - 'Public Attributes', - self.makeHeadingStyle(self.indentLevel+1))) - - map = aClass._attrMap - if map: - map = map.items() - map.sort() - else: - map = [] - for name, typ in map: - if typ != None: - if hasattr(typ, 'desc'): - desc = typ.desc - else: - desc = '%s' % typ.__class__.__name__ - else: - desc = 'None' - self.story.append(Paragraph( - "%s %s" % (name, desc), self.bt)) - self.story.append(Paragraph("", self.bt)) - - - def endClass(self, name, doc, bases): - "Append a graphic demo of a Widget or Drawing at the end of a class." - - PdfDocBuilder0.endClass(self, name, doc, bases) - - aClass = eval('self.skeleton.moduleSpace.' + name) - if hasattr(aClass, '_nodoc'): - pass - elif issubclass(aClass, Widget): - try: - widget = aClass() - except AssertionError, err: - if _abstractclasserr_re.match(str(err)): return - raise - self.story.append(Spacer(0*cm, 0.5*cm)) - self._showWidgetDemoCode(widget) - self.story.append(Spacer(0*cm, 0.5*cm)) - self._showWidgetDemo(widget) - self.story.append(Spacer(0*cm, 0.5*cm)) - self._showWidgetProperties(widget) - self.story.append(PageBreak()) - elif issubclass(aClass, Drawing): - drawing = aClass() - self.story.append(Spacer(0*cm, 0.5*cm)) - self._showDrawingCode(drawing) - self.story.append(Spacer(0*cm, 0.5*cm)) - self._showDrawingDemo(drawing) - self.story.append(Spacer(0*cm, 0.5*cm)) - - - def beginFunctions(self, names): - srch = string.join(names, ' ') - if string.find(string.join(names, ' '), ' sample') > -1: - PdfDocBuilder0.beginFunctions(self, names) - - - # Skip non-sample functions. - def beginFunction(self, name, doc, sig): - "Skip function for 'uninteresting' names." - - if name[:6] == 'sample': - PdfDocBuilder0.beginFunction(self, name, doc, sig) - - - def endFunction(self, name, doc, sig): - "Append a drawing to the story for special function names." - - if name[:6] != 'sample': - return - - if VERBOSE: - print 'GraphPdfDocBuilder.endFunction(%s...)' % name - PdfDocBuilder0.endFunction(self, name, doc, sig) - aFunc = eval('self.skeleton.moduleSpace.' 
+ name) - drawing = aFunc() - - self.story.append(Spacer(0*cm, 0.5*cm)) - self._showFunctionDemoCode(aFunc) - self.story.append(Spacer(0*cm, 0.5*cm)) - self._showDrawingDemo(drawing) - - self.story.append(PageBreak()) - - - def _showFunctionDemoCode(self, function): - """Show a demo code of the function generating the drawing.""" - # Heading - self.story.append(Paragraph("Example", self.bt)) - self.story.append(Paragraph("", self.bt)) - - # Sample code - codeSample = inspect.getsource(function) - self.story.append(Preformatted(codeSample, self.code)) - - - def _showDrawingCode(self, drawing): - """Show code of the drawing class.""" - # Heading - #className = drawing.__class__.__name__ - self.story.append(Paragraph("Example", self.bt)) - - # Sample code - codeSample = inspect.getsource(drawing.__class__.__init__) - self.story.append(Preformatted(codeSample, self.code)) - - - def _showDrawingDemo(self, drawing): - """Show a graphical demo of the drawing.""" - - # Add the given drawing to the story. - # Ignored if no GD rendering available - # or the demo method does not return a drawing. - try: - flo = renderPDF.GraphicsFlowable(drawing) - self.story.append(Spacer(6,6)) - self.story.append(flo) - self.story.append(Spacer(6,6)) - except: - if VERBOSE: - print 'caught exception in _showDrawingDemo' - traceback.print_exc() - else: - pass - - - def _showWidgetDemo(self, widget): - """Show a graphical demo of the widget.""" - - # Get a demo drawing from the widget and add it to the story. - # Ignored if no GD rendering available - # or the demo method does not return a drawing. - try: - if VERIFY: - widget.verify() - drawing = widget.demo() - flo = renderPDF.GraphicsFlowable(drawing) - self.story.append(Spacer(6,6)) - self.story.append(flo) - self.story.append(Spacer(6,6)) - except: - if VERBOSE: - print 'caught exception in _showWidgetDemo' - traceback.print_exc() - else: - pass - - - def _showWidgetDemoCode(self, widget): - """Show a demo code of the widget.""" - # Heading - #className = widget.__class__.__name__ - self.story.append(Paragraph("Example", self.bt)) - - # Sample code - codeSample = inspect.getsource(widget.__class__.demo) - self.story.append(Preformatted(codeSample, self.code)) - - - def _showWidgetProperties(self, widget): - """Dump all properties of a widget.""" - - props = widget.getProperties() - keys = props.keys() - keys.sort() - lines = [] - for key in keys: - value = props[key] - - f = getStringIO() - pprint.pprint(value, f) - value = f.getvalue()[:-1] - valueLines = string.split(value, '\n') - for i in range(1, len(valueLines)): - valueLines[i] = ' '*(len(key)+3) + valueLines[i] - value = string.join(valueLines, '\n') - - lines.append('%s = %s' % (key, value)) - - text = join(lines, '\n') - self.story.append(Paragraph("Properties of Example Widget", self.bt)) - self.story.append(Paragraph("", self.bt)) - self.story.append(Preformatted(text, self.code)) - - -class GraphHtmlDocBuilder0(HtmlDocBuilder0): - "A class to write the skeleton of a Python source." - - fileSuffix = '.html' - - def beginModule(self, name, doc, imported): - # Defer displaying the module header info to later... - self.shouldDisplayModule = (name, doc, imported) - self.hasDisplayedModule = 0 - - - def endModule(self, name, doc, imported): - if self.hasDisplayedModule: - HtmlDocBuilder0.endModule(self, name, doc, imported) - - - def beginClasses(self, names): - # Defer displaying the module header info to later... 
- if self.shouldDisplayModule: - self.shouldDisplayClasses = names - - - # Skip all methods. - def beginMethod(self, name, doc, sig): - pass - - - def endMethod(self, name, doc, sig): - pass - - - def beginClass(self, name, doc, bases): - "Append a graphic demo of a widget at the end of a class." - - aClass = eval('self.skeleton.moduleSpace.' + name) - if issubclass(aClass, Widget): - if self.shouldDisplayModule: - modName, modDoc, imported = self.shouldDisplayModule - self.outLines.append('

%s' % modName) - self.outLines.append('%s' % modDoc) - self.shouldDisplayModule = 0 - self.hasDisplayedModule = 1 - if self.shouldDisplayClasses: - self.outLines.append('Classes
') - self.shouldDisplayClasses = 0 - - HtmlDocBuilder0.beginClass(self, name, doc, bases) - - - def endClass(self, name, doc, bases): - "Append a graphic demo of a widget at the end of a class." - - HtmlDocBuilder0.endClass(self, name, doc, bases) - - aClass = eval('self.skeleton.moduleSpace.' + name) - if issubclass(aClass, Widget): - widget = aClass() - self._showWidgetDemoCode(widget) - self._showWidgetDemo(widget) - self._showWidgetProperties(widget) - - - def beginFunctions(self, names): - if string.find(string.join(names, ' '), ' sample') > -1: - HtmlDocBuilder0.beginFunctions(self, names) - - - # Skip non-sample functions. - def beginFunction(self, name, doc, sig): - "Skip function for 'uninteresting' names." - - if name[:6] == 'sample': - HtmlDocBuilder0.beginFunction(self, name, doc, sig) - - - def endFunction(self, name, doc, sig): - "Append a drawing to the story for special function names." - - if name[:6] != 'sample': - return - - HtmlDocBuilder0.endFunction(self, name, doc, sig) - aFunc = eval('self.skeleton.moduleSpace.' + name) - drawing = aFunc() - - self._showFunctionDemoCode(aFunc) - self._showDrawingDemo(drawing, aFunc.__name__) - - - def _showFunctionDemoCode(self, function): - """Show a demo code of the function generating the drawing.""" - # Heading - self.outLines.append('

Example') - # Sample code - codeSample = inspect.getsource(function) - self.outLines.append('%s
' % codeSample) - - - def _showDrawingDemo(self, drawing, name): - """Show a graphical demo of the drawing.""" - - # Add the given drawing to the story. - # Ignored if no GD rendering available - # or the demo method does not return a drawing. - try: - from reportlab.graphics import renderPM - modName = self.skeleton.getModuleName() - path = '%s-%s.jpg' % (modName, name) - renderPM.drawToFile(drawing, path, fmt='JPG') - self.outLines.append('

Demo
') - self.outLines.append(makeHtmlInlineImage(path)) - except: - if VERBOSE: - print 'caught exception in GraphHTMLDocBuilder._showDrawingDemo' - traceback.print_exc() - else: - pass - - - def _showWidgetDemo(self, widget): - """Show a graphical demo of the widget.""" - - # Get a demo drawing from the widget and add it to the story. - # Ignored if no GD rendering available - # or the demo method does not return a drawing. - try: - from reportlab.graphics import renderPM - drawing = widget.demo() - if VERIFY: - widget.verify() - modName = self.skeleton.getModuleName() - path = '%s-%s.jpg' % (modName, widget.__class__.__name__) - renderPM.drawToFile(drawing, path, fmt='JPG') - self.outLines.append('

Demo
') - self.outLines.append(makeHtmlInlineImage(path)) - except: - if VERBOSE: - - print 'caught exception in GraphHTMLDocBuilder._showWidgetDemo' - traceback.print_exc() - else: - pass - - - def _showWidgetDemoCode(self, widget): - """Show a demo code of the widget.""" - # Heading - #className = widget.__class__.__name__ - self.outLines.append('

Example Code') - - # Sample code - codeSample = inspect.getsource(widget.__class__.demo) - self.outLines.append('%s
' % codeSample) - self.outLines.append('') - - - def _showWidgetProperties(self, widget): - """Dump all properties of a widget.""" - - props = widget.getProperties() - keys = props.keys() - keys.sort() - lines = [] - for key in keys: - value = props[key] - - # Method 3 - f = getStringIO() - pprint.pprint(value, f) - value = f.getvalue()[:-1] - valueLines = string.split(value, '\n') - for i in range(1, len(valueLines)): - valueLines[i] = ' '*(len(key)+3) + valueLines[i] - value = string.join(valueLines, '\n') - - lines.append('%s = %s' % (key, value)) - text = join(lines, '\n') - self.outLines.append('

Properties of Example Widget') - self.outLines.append('%s
' % text) - self.outLines.append('') - - -# Highly experimental! -class PlatypusDocBuilder0(DocBuilder0): - "Document the skeleton of a Python module as a Platypus story." - - fileSuffix = '.pps' # A pickled Platypus story. - - def begin(self, name='', typ=''): - styleSheet = getSampleStyleSheet() - self.code = styleSheet['Code'] - self.bt = styleSheet['BodyText'] - self.story = [] - - - def end(self): - if self.packageName: - self.outPath = self.packageName + self.fileSuffix - elif self.skeleton: - self.outPath = self.skeleton.getModuleName() + self.fileSuffix - else: - self.outPath = '' - - if self.outPath: - f = open(self.outPath, 'w') - pickle.dump(self.story, f) - - - def beginPackage(self, name): - DocBuilder0.beginPackage(self, name) - self.story.append(Paragraph(name, self.bt)) - - - def beginModule(self, name, doc, imported): - story = self.story - bt = self.bt - - story.append(Paragraph(name, bt)) - story.append(XPreformatted(doc, bt)) - - - def beginClasses(self, names): - self.story.append(Paragraph('Classes', self.bt)) - - - def beginClass(self, name, doc, bases): - bt = self.bt - story = self.story - if bases: - bases = map(lambda b:b.__name__, bases) # hack - story.append(Paragraph('%s(%s)' % (name, join(bases, ', ')), bt)) - else: - story.append(Paragraph(name, bt)) - - story.append(XPreformatted(doc, bt)) - - - def beginMethod(self, name, doc, sig): - bt = self.bt - story = self.story - story.append(Paragraph(name+sig, bt)) - story.append(XPreformatted(doc, bt)) - - - def beginFunctions(self, names): - if names: - self.story.append(Paragraph('Functions', self.bt)) - - - def beginFunction(self, name, doc, sig): - bt = self.bt - story = self.story - story.append(Paragraph(name+sig, bt)) - story.append(XPreformatted(doc, bt)) - - -#################################################################### -# -# Main -# -#################################################################### - -def printUsage(): - """graphdocpy.py - Automated documentation for the RL Graphics library. - -Usage: python graphdocpy.py [options] - - [options] - -h Print this help message. - - -f name Use the document builder indicated by 'name', - e.g. Html, Pdf. - - -m module Generate document for module named 'module'. - 'module' may follow any of these forms: - - docpy.py - - docpy - - c:\\test\\docpy - and can be any of these: - - standard Python modules - - modules in the Python search path - - modules in the current directory - - -p package Generate document for package named 'package' - (default is 'reportlab.graphics'). - 'package' may follow any of these forms: - - reportlab - - reportlab.graphics.charts - - c:\\test\\reportlab - and can be any of these: - - standard Python packages (?) - - packages in the Python search path - - packages in the current directory - - -s Silent mode (default is unset). - -Examples: - - python graphdocpy.py reportlab.graphics - python graphdocpy.py -m signsandsymbols.py -f Pdf - python graphdocpy.py -m flags.py -f Html - python graphdocpy.py -m barchart1.py -""" - - -# The following functions, including main(), are actually -# the same as in docpy.py (except for some defaults). - -def documentModule0(pathOrName, builder, opts={}): - """Generate documentation for one Python file in some format. - - This handles Python standard modules like string, custom modules - on the Python search path like e.g. docpy as well as modules - specified with their full path like C:/tmp/junk.py. 
- - The doc file will always be saved in the current directory with - a basename equal to that of the module, e.g. docpy. - """ - cwd = os.getcwd() - - # Append directory to Python search path if we get one. - dirName = os.path.dirname(pathOrName) - if dirName: - sys.path.append(dirName) - - # Remove .py extension from module name. - if pathOrName[-3:] == '.py': - modname = pathOrName[:-3] - else: - modname = pathOrName - - # Remove directory paths from module name. - if dirName: - modname = os.path.basename(modname) - - # Load the module. - try: - module = __import__(modname) - except: - print 'Failed to import %s.' % modname - os.chdir(cwd) - return - - # Do the real documentation work. - s = ModuleSkeleton0() - s.inspect(module) - builder.write(s) - - # Remove appended directory from Python search path if we got one. - if dirName: - del sys.path[-1] - - os.chdir(cwd) - - -def _packageWalkCallback((builder, opts), dirPath, files): - "A callback function used when waking over a package tree." - #must CD into a directory to document the module correctly - cwd = os.getcwd() - os.chdir(dirPath) - - - # Skip __init__ files. - files = filter(lambda f:f != '__init__.py', files) - - files = filter(lambda f:f[-3:] == '.py', files) - for f in files: - path = os.path.join(dirPath, f) -## if not opts.get('isSilent', 0): -## print path - builder.indentLevel = builder.indentLevel + 1 - #documentModule0(path, builder) - documentModule0(f, builder) - builder.indentLevel = builder.indentLevel - 1 - #CD back out - os.chdir(cwd) - -def documentPackage0(pathOrName, builder, opts={}): - """Generate documentation for one Python package in some format. - - 'pathOrName' can be either a filesystem path leading to a Python - package or package name whose path will be resolved by importing - the top-level module. - - The doc file will always be saved in the current directory with - a basename equal to that of the package, e.g. reportlab.lib. - """ - - # Did we get a package path with OS-dependant seperators...? - if os.sep in pathOrName: - path = pathOrName - name = os.path.splitext(os.path.basename(path))[0] - # ... or rather a package name? - else: - name = pathOrName - package = __import__(name) - # Some special care needed for dotted names. - if '.' in name: - subname = 'package' + name[find(name, '.'):] - package = eval(subname) - path = os.path.dirname(package.__file__) - - cwd = os.getcwd() - os.chdir(path) - builder.beginPackage(name) - os.path.walk(path, _packageWalkCallback, (builder, opts)) - builder.endPackage(name) - os.chdir(cwd) - - -def makeGraphicsReference(outfilename): - "Make graphics_reference.pdf" - builder = GraphPdfDocBuilder0() - - builder.begin(name='reportlab.graphics', typ='package') - documentPackage0('reportlab.graphics', builder, {'isSilent': 0}) - builder.end(outfilename) - print 'made graphics reference in %s' % outfilename - -def main(): - "Handle command-line options and trigger corresponding action." - - opts, args = getopt.getopt(sys.argv[1:], 'hsf:m:p:') - - # Make an options dictionary that is easier to use. - optsDict = {} - for k, v in opts: - optsDict[k] = v - hasOpt = optsDict.has_key - - # On -h print usage and exit immediately. - if hasOpt('-h'): - print printUsage.__doc__ - sys.exit(0) - - # On -s set silent mode. - isSilent = hasOpt('-s') - - # On -f set the appropriate DocBuilder to use or a default one. - builder = { 'Pdf': GraphPdfDocBuilder0, - 'Html': GraphHtmlDocBuilder0, - }[optsDict.get('-f', 'Pdf')]() - - # Set default module or package to document. 
- if not hasOpt('-p') and not hasOpt('-m'): - optsDict['-p'] = 'reportlab.graphics' - - # Save a few options for further use. - options = {'isSilent':isSilent} - - # Now call the real documentation functions. - if hasOpt('-m'): - nameOrPath = optsDict['-m'] - if not isSilent: - print "Generating documentation for module %s..." % nameOrPath - builder.begin(name=nameOrPath, typ='module') - documentModule0(nameOrPath, builder, options) - elif hasOpt('-p'): - nameOrPath = optsDict['-p'] - if not isSilent: - print "Generating documentation for package %s..." % nameOrPath - builder.begin(name=nameOrPath, typ='package') - documentPackage0(nameOrPath, builder, options) - builder.end() - - if not isSilent: - print "Saved %s." % builder.outPath - - #if doing the usual, put a copy in docs - if builder.outPath == 'reportlab.graphics.pdf': - import shutil, reportlab - dst = os.path.join(os.path.dirname(reportlab.__file__),'docs','graphics_reference.pdf') - shutil.copyfile('reportlab.graphics.pdf', dst) - if not isSilent: - print 'copied to '+dst - -def makeSuite(): - "standard test harness support - run self as separate process" - from reportlab.test.utils import ScriptThatMakesFileTest - return ScriptThatMakesFileTest('tools/docco', - 'graphdocpy.py', - 'reportlab.graphics.pdf') - -if __name__ == '__main__': - main() diff --git a/dist-packages/wordaxe/wordaxe/rl/kerning_info.py b/dist-packages/wordaxe/wordaxe/rl/kerning_info.py deleted file mode 100755 index 097f58105..000000000 --- a/dist-packages/wordaxe/wordaxe/rl/kerning_info.py +++ /dev/null @@ -1,132 +0,0 @@ -#!/usr/bin/env python -# -*- coding: iso-8859-1 -*- -# Kerning info - -import os -import sys -from reportlab.pdfbase import ttfonts -import struct -import bisect - -def extract_kerning_table(face): - "Extract the Kerning table from a TrueType font" - try: - raw_data = face.get_table("kern") - except KeyError: - # Font does not contain a kerning table - return [] - version, nTables = struct.unpack_from(">HH", raw_data) - #print "version:", version - #print "nTables:", nTables - offset = 4 - for subtable_no in range(nTables): - #print "subtable %s" % subtable_no - version, length, coverage = struct.unpack_from(">HHH", raw_data, offset) - #print "version:", version - #print "length:", length - #print "coverage:",coverage - horizontal = bool(coverage & 1) - minimum = bool(coverage & 2) - cross_stream = bool(coverage & 4) - override = bool(coverage & 8) - format = coverage >> 8 - #print "horizontal:", horizontal - #print "minimum:", minimum - #print "cross_stream:", cross_stream - #print "override", override - #print "format:", format - o = offset + 6 - if format == 0: - nPairs, searchRange, entrySelector, rangeShift = struct.unpack_from(">4H", raw_data, o) - #print "nPairs:", nPairs - #print "searchRange:", searchRange - #print "entrySelector:", entrySelector - #print "rangeShift:", rangeShift - o += 8 - # TODO can this be done more efficient using struct? - pairs = [] - for entry in range(nPairs): - pair = struct.unpack_from(">HHh", raw_data, o) - pairs.append(pair) - o += 6 - #print "pairs:", pairs - else: - print "Kerning subtable format %s not supported." % format - offset += length - assert o == offset - return pairs - -def glyf(face, ch): - "Glyph index of ch" - return face.charToGlyph[ord(ch)] - -def kerning(face, a, b): - """ - Returns the kerning for the characters a and b and the font face. - The return value is given in em/1000(?). - """ - unitsPerEm = getattr(face, "unitsPerEm", 0) - if not unitsPerEm: - return 0. 
- scale = lambda x, unitsPerEm=unitsPerEm: x * 1000. / unitsPerEm - kerning_table = getattr(face, "kerning_table", None) - if kerning_table is None: - kerning_table = extract_kerning_table(face) - setattr(face, "kerning_table", kerning_table) - gl_a = glyf(face, a) - gl_b = glyf(face, b) - i = bisect.bisect(kerning_table, (gl_a, gl_b, None)) - if i == len(kerning_table): - return 0. - l, r, k = kerning_table[i] - if l == gl_a and r == gl_b: - return scale(k) - else: - return 0. - -def kerning_pairs(face, s): - """ - Compute the Kerning values for the character-pairs of the string s. - """ - if not s: - return [] - result = [] - old = s[0] - for ch in s[1:]: - result.append(kerning(face, old, ch)) - old = ch - #print "kerning_pairs(%r) = %r" % (s, result) - return result - -#print kerning_pairs(font.face, "MAYA") -#print kerning_pairs(font.face, "VASE") -#print kerning_pairs(font.face, "Vaya con dios") - -def stringWidth_kerning(font, text, size, encoding='utf-8'): - """ - see TTFont.pyStringWidth, but takes kerning into account - """ - if not isinstance(text,unicode): - text = unicode(text, encoding or 'utf-8') # encoding defaults to utf-8 - face = font.face - g = face.charWidths.get - dw = face.defaultWidth - kp = kerning_pairs(face, text) - return 0.001*size*(sum([g(ord(u),dw) for u in text]) + sum(kp)) - - -import new -ttfonts.TTFont.stringWidth_kerning = new.instancemethod(stringWidth_kerning,None,ttfonts.TTFont) - -if __name__ == "__main__": - fname = r"c:\windows\fonts\arial.ttf" - font = ttfonts.TTFont("Arial", fname) - face = font.face - print kerning(font.face, 'A', 'V') - print kerning(font.face, 'A', 'A') - print kerning(font.face, 'A', 'v') - for text in ["Maya", "Vase", "Kandelaber", "Kronjuwelen", "Vaya con dios" - ]: - print text, - print "without Kerning:", font.stringWidth(text, 12), - print "with Kerning:", font.stringWidth_kerning(text, 12) diff --git a/dist-packages/wordaxe/wordaxe/rl/para_fragments.py b/dist-packages/wordaxe/wordaxe/rl/para_fragments.py deleted file mode 100755 index 23f08a879..000000000 --- a/dist-packages/wordaxe/wordaxe/rl/para_fragments.py +++ /dev/null @@ -1,360 +0,0 @@ -# -*- coding: utf-8 -*- - -# Helper classes for the new Paragraph-Implementation - -from copy import copy - -import reportlab.pdfbase.pdfmetrics as pdfmetrics -from reportlab.lib.abag import ABag - -from wordaxe.hyphen import HyphenationPoint, SHY, HyphenatedWord -from wordaxe.rl.kerning_info import kerning_pairs - -class Style(ABag): - "This is used to store style attributes." - -class Fragment(object): - "A fragment representing a piece of text or other information" - -class StyledFragment(Fragment): - def __init__(self, style): - self.style = style - - @staticmethod - def str_width(text, style): - "Compute the width of a styled text" - return pdfmetrics.stringWidth(text, style.fontName, style.fontSize) - - def __repr__(self): - return self.__class__.__name__ - __str__ = __repr__ - - kerning_pairs = None - -class StyledText(StyledFragment): - "A string in some style" - def __init__(self, text, style, kerning): - assert isinstance(text, unicode) - super(StyledText, self).__init__(style) - self.text = text - self.width = self.str_width(text, style) - if kerning: - # Take kerning into account - font = pdfmetrics.getFont(style.fontName) - face = font.face - kp = kerning_pairs(face, text) - skp = sum(kp) - #print "skp=", skp - self.kerning_pairs = kp - self.width += 0.001*style.fontSize*skp - #print "Kerning!" 
- else: - self.kerning_pairs = None - if hasattr(style, "nobr"): - self.nobr = True - cbDefn = getattr(style,"cbDefn", None) - if cbDefn is not None and not self.width: - self.width = getattr(cbDefn, "width", 0) - self.ascent, self.descent = pdfmetrics.getAscentDescent(style.fontName, style.fontSize) - - def __str__(self): - return "ST(%s)" % self.text.encode("utf-8") - - __repr__ = __str__ - - @staticmethod - def fromParaFrag(frag): - "This allows to reuse the good old paraparser.py" - text = frag.text - if not isinstance(text, unicode): - text = unicode(text, "utf-8") - return StyledText(text, frag) #TODO kerning? - -class StyledWhiteSpace(StyledFragment): - "Used for every token that delimits words." - -class StyledSpace(StyledWhiteSpace): - "A spacer in some style" - def __init__(self, style, text=u" "): - super(StyledSpace, self).__init__(style) - self.text = unicode(text) - self.width = self.str_width(text, style) - - def __str__(self): - return "SP(%s)" % self.text.encode("utf-8") - - __repr__ = __str__ - -class StyledNewLine(StyledWhiteSpace): - "A new line" - - def __str__(self): - return "NL" - - def __init__(self, style): - super(StyledNewLine, self).__init__(style) - self.width = 0 - self.text = u"" - - __repr__ = __str__ - -class StyledWord(Fragment): - "A word compound of some styled strings" - - def __init__(self, fragments): - for frag in fragments: assert isinstance(frag, StyledText) - self.fragments = fragments - # Breite berechnen - self.text = u"".join([f.text for f in fragments]) - self.width = sum([f.width for f in fragments]) - for f in fragments: - if hasattr(f, "nobr"): - self.nobr = True - break - - def __str__(self): - return "SW(%s)" % self.text.encode("utf-8") - - __repr__ = __str__ - - def splitAt(self, hp): - """ - Splits the styled word at a given hyphenation point - (see wordaxe.hyphen). - The result is a tuple (left, right) of StyledWords. - Works just like HyphenatedWord.split, but for a StyledWord. 
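# Editorial note, not part of the removed file: the implementation below uses the
# HyphenationPoint attributes as imported from wordaxe.hyphen:
#   hp.indx        -- character index at which the word is split
#   hp.nl / hp.nr  -- number of characters dropped at the end of the left part
#                     and at the start of the right part
#   hp.sl / hp.sr  -- replacement strings appended to the left part (usually a
#                     soft hyphen, SHY) and prepended to the right part
# A plain integer hp is treated as indx=hp with nl=nr=0, sl=SHY and sr=u"".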
- """ - - #print self, "splitAt", hp - assert isinstance(self.text, HyphenatedWord) - # first get the unstyled versions - ltext, rtext = self.text.split(hp) - #print " unstyled would return", ltext, rtext - if isinstance(hp, int): - indx = hp - nl = nr = 0 - sl = SHY - sr = u"" - else: - indx = hp.indx - nl = hp.nl - nr = hp.nr - sl = hp.sl - sr = hp.sr - lfrags = [] - rfrags = [] - n = 0 - stillLeftPart = True - firstRight = False - for frag in self.fragments: - if not isinstance(frag, StyledText): - if stillLeftPart: - lfrags.append(frag) - else: - rfrags.append(frag) - continue - text = frag.text - if stillLeftPart: - if len(text) < indx-n: - # fragment still before the hyphenation point - lfrags.append(frag) - n += len(text) - elif len(text) == indx-n: - # fragment boundary exactly at the hyphenation point - if nl>0: text = text[:-nl] - if sl: text += sl - if text is frag.text: - lfrags.append(frag) - else: - lfrags.append(StyledText(text, frag.style, bool(frag.kerning_pairs))) - n += len(text) - stillLeftPart = False - firstRight = True - else: - # fragment crosses the hyphenation point - n1 = indx-n - tl = text[:n1-nl] + sl - tr = sr + text[n1+nr:] - lfrags.append(StyledText(tl, frag.style, bool(frag.kerning_pairs))) - rfrags.append(StyledText(tr, frag.style, bool(frag.kerning_pairs))) - stillLeftPart = False - elif firstRight and (sr or nr): - rfrags.append(StyledText(sr + frag.text[nr:], frag.style, bool(frag.kerning_pairs))) - firstRight = False - else: - rfrags.append(frag) - left = StyledWord(lfrags) - right = StyledWord(rfrags) - #print "splitWordAt returns %s, %s" % (left, right) - assert left.text == ltext - assert unicode(right.text) == rtext - right.text = rtext - return left, right - - -class Line(object): - "A single line in the paragraph" - - def __init__(self, fragments, width, height, baseline, space_wasted, keepWhiteSpace): - for frag in fragments: assert isinstance(frag, Fragment) - self.fragments = fragments - self.width = width - #print fragments - self.height = height - self.baseline = baseline - self.keepWhiteSpace = keepWhiteSpace - assert 0 <= self.baseline - assert baseline <= height - self.space_wasted = space_wasted - # don't consider WhiteSpace at the start and end of the line - # for the width calculation - print_indx_start, print_indx_end = (0, len(self.fragments)) - if not keepWhiteSpace: - while print_indx_start < len(fragments) \ - and isinstance(fragments[print_indx_start], StyledWhiteSpace): - print_indx_start += 1 - while print_indx_end > print_indx_start \ - and isinstance(fragments[print_indx_end-1], StyledWhiteSpace): - print_indx_end -= 1 - # TODO: What to do with two differently styled spaces - # in the middle of the line? 
- self.print_indx_start = print_indx_start - self.print_indx_end = print_indx_end - #assert abs(self.width - sum(getattr(f,"width",0) for f in fragments[print_indx_start:print_indx_end])) <= 1e-5 - if not abs(self.width - sum(getattr(f,"width",0) for f in fragments[print_indx_start:print_indx_end])) <= 1e-5: - print "Assertion failure" - print "self.width=%f" % self.width - print "nFrags=%d" % len(fragments) - print "printrange=%d:%d" % (self.print_indx_start, print_indx_end) - print "printwidth=%f" % sum(getattr(f,"width",0) for f in fragments[print_indx_start:print_indx_end]) - for i,f in enumerate(fragments): print i, f, getattr(f, "width") - - # Compute font size - max_size = 0 - max_ascent = min_descent = 0 - for frag in self.iter_flattened_frags(): - if isinstance(frag, StyledText): - size = getattr(frag.style, "fontSize", 0) - ascent, descent = frag.ascent, frag.descent - if not max_size: - max_size = size - max_ascent = ascent - max_descent = descent - else: - max_size = max(max_size, size) - max_ascent = max(max_ascent, ascent) - min_descent = min(min_descent, descent) - self.fontSize = max_size - self.ascent = max_ascent - self.descent = min_descent - - def __str__(self): - return "Line(%s)" % (",".join(str(frag) for frag in self.fragments)) - - __repr__ = __str__ - - - def iter_flattened_frags(self): - """ - Returns the fragments flattened (one word may contribute several fragments). - """ - return flatten_frags(self.fragments) - - def iter_print_frags(self): - """ - Returns the fragments (to print) flattened (one word may contribute several fragments). - """ - return flatten_frags(self.fragments[self.print_indx_start:self.print_indx_end]) - -def frags_to_StyledFragments(frag_list, kerning): - """ - A helper function for frags_reportlab_to_wordaxe. - Yields StyledWords, StyledSpace and other entries, - """ - for f in frag_list: - #if hasattr(f, "cbDefn") and f.cbDefn.kind!="img": print "convert", f - if getattr(f, "lineBreak", False): - assert not f.text - yield StyledNewLine(f) - text = f.text - del f.text - if not isinstance(text, unicode): - text = unicode(text, "utf-8") - while u" " in text: - indxSpace = text.find(u" ") - if indxSpace > 0: - yield StyledText(text[:indxSpace], f, kerning) - indxNext = indxSpace - while text[indxNext:].startswith(u" "): - indxNext += 1 - yield StyledSpace(f) # we ignore repeated blanks - text = text[indxNext:] - if text or hasattr(f, "cbDefn"): - yield StyledText(text, f, kerning) - - -def frags_reportlab_to_wordaxe(frags, paragraph_style): - """ - Converts an iterator of reportlab frags to wordaxe frags. - Yields StyledWords, StyledSpace and other entries, - but StyledTexts are grouped to StyledWords. - """ - kerning = getattr(paragraph_style, "kerning", False) - word_frags = [] - - for frag in frags_to_StyledFragments(frags, kerning): - if isinstance(frag, StyledText): - word_frags.append(frag) - else: - if word_frags: - yield StyledWord(word_frags) - word_frags = [] - yield frag - if word_frags: - yield StyledWord(word_frags) - -def flatten_frags(frags): - """ - A helper function that flattens the StyledFragments, - i.e. StyledWords are split into StyledText fragments. - """ - for frag in frags: - if isinstance(frag, StyledWord): - for f in frag.fragments: - yield f - else: - yield frag - -def frags_wordaxe_to_reportlab(frags): - """ - Converts an iterator of wordaxe frags to reportlab frags. - Fragments of the same style will be joined. 
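# Editorial note, not part of the removed file: "joined" here means that
# consecutive flattened fragments whose .style is the same object are merged into
# a single reportlab frag: their texts are concatenated, their widths added, and
# their kerning_pairs lists stitched together (a missing list is padded with 0.0
# entries, and one extra 0.0 is appended for the new character pair created at
# the seam between the two texts).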
- """ - last_frag = None - for frag in flatten_frags(frags): - if last_frag is None or not hasattr(frag,"text") or last_style is not frag.style: - if last_frag is not None: - yield last_frag - last_style = frag.style - last_frag = copy(frag) - else: - if frag.kerning_pairs is not None or last_frag.kerning_pairs is not None: - # handle kerning pairs - lfkp = last_frag.kerning_pairs - fkp = frag.kerning_pairs - # TODO Special handling for the case that last_frag.text == "" or frag.text == "" - if lfkp is None: - if fkp is None: - pass - else: - last_frag.kerning_pairs = ([0.0] * len(last_frag.text)) + fkp - else: - if fkp is None: - last_frag.kerning_pairs = lfkp + ([0.0] * len(frag.text)) - else: - lfkp.append(0.0) - lfkp += frag.kerning_pairs - last_frag.text += frag.text - last_frag.width += frag.width - if last_frag is not None: - yield last_frag diff --git a/dist-packages/wordaxe/wordaxe/rl/paragraph.py b/dist-packages/wordaxe/wordaxe/rl/paragraph.py deleted file mode 100755 index 71606d6b7..000000000 --- a/dist-packages/wordaxe/wordaxe/rl/paragraph.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/env python -# -*- coding: iso-8859-1 -*- -#Copyright ReportLab Europe Ltd. 2000-2004 -# -#see license.txt for license details -#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/platypus/paragraph.py -#$Header: /cvsroot/deco-cow/hyphenation/reportlab/platypus/paragraph.py,v 1.1.1.1 2004/04/27 21:19:02 hvbargen Exp $ -# -# @CHANGED Henning von Bargen, added hyphenation support. - -from wordaxe.rl.NewParagraph import * diff --git a/dist-packages/wordaxe/wordaxe/rl/paraparser.py b/dist-packages/wordaxe/wordaxe/rl/paraparser.py deleted file mode 100755 index a590b2740..000000000 --- a/dist-packages/wordaxe/wordaxe/rl/paraparser.py +++ /dev/null @@ -1,31 +0,0 @@ -#Copyright ReportLab Europe Ltd. 
2000-2004 -#see license.txt for license details -#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/platypus/paraparser.py -__version__=''' $Id: paraparser.py 2853 2006-05-10 12:56:39Z rgbecker $ ''' - -from reportlab.platypus.paraparser import * - -_orig_parser = ParaParser - -class ParaParser(_orig_parser): - - def setEncoding(self, enc): - self._enc = enc - - def parse(self, text, style): - - # HVB 20070201 - if not hasattr(self, "_enc"): - self._enc = 'cp1252' #our legacy default - enc = self._enc - - return _orig_parser.parse(self, text, style) - -class NoBrParaParser(ParaParser): - """ParaParser with support for 'nobr' Tags.""" - - def start_nobr( self, attributes ): - self._push(nobr=True) - - def end_nobr( self ): - self._pop(nobr=True) diff --git a/dist-packages/wordaxe/wordaxe/rl/rl_codecs.py b/dist-packages/wordaxe/wordaxe/rl/rl_codecs.py deleted file mode 100755 index 1b69c0903..000000000 --- a/dist-packages/wordaxe/wordaxe/rl/rl_codecs.py +++ /dev/null @@ -1,1034 +0,0 @@ -#codecs support -__all__=['RL_Codecs'] -class RL_Codecs: - __rl_codecs_data = { - 'winansi':({ - 0x007f: 0x2022, # BULLET - 0x0080: 0x20ac, # EURO SIGN - 0x0081: 0x2022, # BULLET - 0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK - 0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK - 0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK - 0x0085: 0x2026, # HORIZONTAL ELLIPSIS - 0x0086: 0x2020, # DAGGER - 0x0087: 0x2021, # DOUBLE DAGGER - 0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT - 0x0089: 0x2030, # PER MILLE SIGN - 0x008a: 0x0160, # LATIN CAPITAL LETTER S WITH CARON - 0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK - 0x008c: 0x0152, # LATIN CAPITAL LIGATURE OE - 0x008d: 0x2022, # BULLET - 0x008e: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON - 0x008f: 0x2022, # BULLET - 0x0090: 0x2022, # BULLET - 0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK - 0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK - 0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK - 0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK - 0x0095: 0x2022, # BULLET - 0x0096: 0x2013, # EN DASH - 0x0097: 0x2014, # EM DASH - 0x0098: 0x02dc, # SMALL TILDE - 0x0099: 0x2122, # TRADE MARK SIGN - 0x009a: 0x0161, # LATIN SMALL LETTER S WITH CARON - 0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK - 0x009c: 0x0153, # LATIN SMALL LIGATURE OE - 0x009d: 0x2022, # BULLET - 0x009e: 0x017e, # LATIN SMALL LETTER Z WITH CARON - 0x009f: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS - 0x00a0: 0x0020, # SPACE - #### HVB, 23.10.2006 0x00ad: 0x002d, # HYPHEN-MINUS - #### HVB, 23.10.2006 }, {0x2022:0x7f,0x20:0x20,0x2d:0x2d,0xa0:0x20}), - }, {0x2022:0x7f,0x20:0x20,0x2d:0x2d,0xa0:0x20, 0xad:0x00ad}), - 'macroman':({ - 0x007f: None, # UNDEFINED - 0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS - 0x0081: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE - 0x0082: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA - 0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE - 0x0084: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE - 0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS - 0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS - 0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE - 0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE - 0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX - 0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS - 0x008b: 0x00e3, # LATIN SMALL LETTER A WITH TILDE - 0x008c: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE - 0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA - 0x008e: 0x00e9, # LATIN 
SMALL LETTER E WITH ACUTE - 0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE - 0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX - 0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS - 0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE - 0x0093: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE - 0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX - 0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS - 0x0096: 0x00f1, # LATIN SMALL LETTER N WITH TILDE - 0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE - 0x0098: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE - 0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX - 0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS - 0x009b: 0x00f5, # LATIN SMALL LETTER O WITH TILDE - 0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE - 0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE - 0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX - 0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS - 0x00a0: 0x2020, # DAGGER - 0x00a1: 0x00b0, # DEGREE SIGN - 0x00a4: 0x00a7, # SECTION SIGN - 0x00a5: 0x2022, # BULLET - 0x00a6: 0x00b6, # PILCROW SIGN - 0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S - 0x00a8: 0x00ae, # REGISTERED SIGN - 0x00aa: 0x2122, # TRADE MARK SIGN - 0x00ab: 0x00b4, # ACUTE ACCENT - 0x00ac: 0x00a8, # DIAERESIS - 0x00ad: None, # UNDEFINED - 0x00ae: 0x00c6, # LATIN CAPITAL LETTER AE - 0x00af: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE - 0x00b0: None, # UNDEFINED - 0x00b2: None, # UNDEFINED - 0x00b3: None, # UNDEFINED - 0x00b4: 0x00a5, # YEN SIGN - 0x00b6: None, # UNDEFINED - 0x00b7: None, # UNDEFINED - 0x00b8: None, # UNDEFINED - 0x00b9: None, # UNDEFINED - 0x00ba: None, # UNDEFINED - 0x00bb: 0x00aa, # FEMININE ORDINAL INDICATOR - 0x00bc: 0x00ba, # MASCULINE ORDINAL INDICATOR - 0x00bd: None, # UNDEFINED - 0x00be: 0x00e6, # LATIN SMALL LETTER AE - 0x00bf: 0x00f8, # LATIN SMALL LETTER O WITH STROKE - 0x00c0: 0x00bf, # INVERTED QUESTION MARK - 0x00c1: 0x00a1, # INVERTED EXCLAMATION MARK - 0x00c2: 0x00ac, # NOT SIGN - 0x00c3: None, # UNDEFINED - 0x00c4: 0x0192, # LATIN SMALL LETTER F WITH HOOK - 0x00c5: None, # UNDEFINED - 0x00c6: None, # UNDEFINED - 0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK - 0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK - 0x00c9: 0x2026, # HORIZONTAL ELLIPSIS - 0x00ca: 0x0020, # SPACE - 0x00cb: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE - 0x00cc: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE - 0x00cd: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE - 0x00ce: 0x0152, # LATIN CAPITAL LIGATURE OE - 0x00cf: 0x0153, # LATIN SMALL LIGATURE OE - 0x00d0: 0x2013, # EN DASH - 0x00d1: 0x2014, # EM DASH - 0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK - 0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK - 0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK - 0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK - 0x00d6: 0x00f7, # DIVISION SIGN - 0x00d7: None, # UNDEFINED - 0x00d8: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS - 0x00d9: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS - 0x00da: 0x2044, # FRACTION SLASH - 0x00db: 0x00a4, # CURRENCY SIGN - 0x00dc: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK - 0x00dd: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK - 0x00de: 0xfb01, # LATIN SMALL LIGATURE FI - 0x00df: 0xfb02, # LATIN SMALL LIGATURE FL - 0x00e0: 0x2021, # DOUBLE DAGGER - 0x00e1: 0x00b7, # MIDDLE DOT - 0x00e2: 0x201a, # SINGLE LOW-9 QUOTATION MARK - 0x00e3: 0x201e, # DOUBLE LOW-9 QUOTATION MARK - 0x00e4: 0x2030, # PER MILLE SIGN - 0x00e5: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX - 0x00e6: 0x00ca, # 
LATIN CAPITAL LETTER E WITH CIRCUMFLEX - 0x00e7: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE - 0x00e8: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS - 0x00e9: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE - 0x00ea: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE - 0x00eb: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX - 0x00ec: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS - 0x00ed: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE - 0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE - 0x00ef: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX - 0x00f0: None, # UNDEFINED - 0x00f1: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE - 0x00f2: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE - 0x00f3: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX - 0x00f4: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE - 0x00f5: 0x0131, # LATIN SMALL LETTER DOTLESS I - 0x00f6: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT - 0x00f7: 0x02dc, # SMALL TILDE - 0x00f8: 0x00af, # MACRON - 0x00f9: 0x02d8, # BREVE - 0x00fa: 0x02d9, # DOT ABOVE - 0x00fb: 0x02da, # RING ABOVE - 0x00fc: 0x00b8, # CEDILLA - 0x00fd: 0x02dd, # DOUBLE ACUTE ACCENT - 0x00fe: 0x02db, # OGONEK - 0x00ff: 0x02c7, # CARON - },None), - 'standard':({ - 0x0027: 0x2019, # RIGHT SINGLE QUOTATION MARK - 0x0060: 0x2018, # LEFT SINGLE QUOTATION MARK - 0x007f: None, # UNDEFINED - 0x0080: None, # UNDEFINED - 0x0081: None, # UNDEFINED - 0x0082: None, # UNDEFINED - 0x0083: None, # UNDEFINED - 0x0084: None, # UNDEFINED - 0x0085: None, # UNDEFINED - 0x0086: None, # UNDEFINED - 0x0087: None, # UNDEFINED - 0x0088: None, # UNDEFINED - 0x0089: None, # UNDEFINED - 0x008a: None, # UNDEFINED - 0x008b: None, # UNDEFINED - 0x008c: None, # UNDEFINED - 0x008d: None, # UNDEFINED - 0x008e: None, # UNDEFINED - 0x008f: None, # UNDEFINED - 0x0090: None, # UNDEFINED - 0x0091: None, # UNDEFINED - 0x0092: None, # UNDEFINED - 0x0093: None, # UNDEFINED - 0x0094: None, # UNDEFINED - 0x0095: None, # UNDEFINED - 0x0096: None, # UNDEFINED - 0x0097: None, # UNDEFINED - 0x0098: None, # UNDEFINED - 0x0099: None, # UNDEFINED - 0x009a: None, # UNDEFINED - 0x009b: None, # UNDEFINED - 0x009c: None, # UNDEFINED - 0x009d: None, # UNDEFINED - 0x009e: None, # UNDEFINED - 0x009f: None, # UNDEFINED - 0x00a0: None, # UNDEFINED - 0x00a4: 0x2044, # FRACTION SLASH - 0x00a6: 0x0192, # LATIN SMALL LETTER F WITH HOOK - 0x00a8: 0x00a4, # CURRENCY SIGN - 0x00a9: 0x0027, # APOSTROPHE - 0x00aa: 0x201c, # LEFT DOUBLE QUOTATION MARK - 0x00ac: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK - 0x00ad: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK - 0x00ae: 0xfb01, # LATIN SMALL LIGATURE FI - 0x00af: 0xfb02, # LATIN SMALL LIGATURE FL - 0x00b0: None, # UNDEFINED - 0x00b1: 0x2013, # EN DASH - 0x00b2: 0x2020, # DAGGER - 0x00b3: 0x2021, # DOUBLE DAGGER - 0x00b4: 0x00b7, # MIDDLE DOT - 0x00b5: None, # UNDEFINED - 0x00b7: 0x2022, # BULLET - 0x00b8: 0x201a, # SINGLE LOW-9 QUOTATION MARK - 0x00b9: 0x201e, # DOUBLE LOW-9 QUOTATION MARK - 0x00ba: 0x201d, # RIGHT DOUBLE QUOTATION MARK - 0x00bc: 0x2026, # HORIZONTAL ELLIPSIS - 0x00bd: 0x2030, # PER MILLE SIGN - 0x00be: None, # UNDEFINED - 0x00c0: None, # UNDEFINED - 0x00c1: 0x0060, # GRAVE ACCENT - 0x00c2: 0x00b4, # ACUTE ACCENT - 0x00c3: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT - 0x00c4: 0x02dc, # SMALL TILDE - 0x00c5: 0x00af, # MACRON - 0x00c6: 0x02d8, # BREVE - 0x00c7: 0x02d9, # DOT ABOVE - 0x00c8: 0x00a8, # DIAERESIS - 0x00c9: None, # UNDEFINED - 0x00ca: 0x02da, # RING ABOVE - 0x00cb: 0x00b8, # CEDILLA - 0x00cc: None, # UNDEFINED - 0x00cd: 0x02dd, # DOUBLE ACUTE ACCENT - 0x00ce: 
0x02db, # OGONEK - 0x00cf: 0x02c7, # CARON - 0x00d0: 0x2014, # EM DASH - 0x00d1: None, # UNDEFINED - 0x00d2: None, # UNDEFINED - 0x00d3: None, # UNDEFINED - 0x00d4: None, # UNDEFINED - 0x00d5: None, # UNDEFINED - 0x00d6: None, # UNDEFINED - 0x00d7: None, # UNDEFINED - 0x00d8: None, # UNDEFINED - 0x00d9: None, # UNDEFINED - 0x00da: None, # UNDEFINED - 0x00db: None, # UNDEFINED - 0x00dc: None, # UNDEFINED - 0x00dd: None, # UNDEFINED - 0x00de: None, # UNDEFINED - 0x00df: None, # UNDEFINED - 0x00e0: None, # UNDEFINED - 0x00e1: 0x00c6, # LATIN CAPITAL LETTER AE - 0x00e2: None, # UNDEFINED - 0x00e3: 0x00aa, # FEMININE ORDINAL INDICATOR - 0x00e4: None, # UNDEFINED - 0x00e5: None, # UNDEFINED - 0x00e6: None, # UNDEFINED - 0x00e7: None, # UNDEFINED - 0x00e8: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE - 0x00e9: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE - 0x00ea: 0x0152, # LATIN CAPITAL LIGATURE OE - 0x00eb: 0x00ba, # MASCULINE ORDINAL INDICATOR - 0x00ec: None, # UNDEFINED - 0x00ed: None, # UNDEFINED - 0x00ee: None, # UNDEFINED - 0x00ef: None, # UNDEFINED - 0x00f0: None, # UNDEFINED - 0x00f1: 0x00e6, # LATIN SMALL LETTER AE - 0x00f2: None, # UNDEFINED - 0x00f3: None, # UNDEFINED - 0x00f4: None, # UNDEFINED - 0x00f5: 0x0131, # LATIN SMALL LETTER DOTLESS I - 0x00f6: None, # UNDEFINED - 0x00f7: None, # UNDEFINED - 0x00f8: 0x0142, # LATIN SMALL LETTER L WITH STROKE - 0x00f9: 0x00f8, # LATIN SMALL LETTER O WITH STROKE - 0x00fa: 0x0153, # LATIN SMALL LIGATURE OE - 0x00fb: 0x00df, # LATIN SMALL LETTER SHARP S - 0x00fc: None, # UNDEFINED - 0x00fd: None, # UNDEFINED - 0x00fe: None, # UNDEFINED - 0x00ff: None, # UNDEFINED - },None), - 'symbol':({ - 0x0022: 0x2200, # FOR ALL - 0x0024: 0x2203, # THERE EXISTS - 0x0027: 0x220b, # CONTAINS AS MEMBER - 0x002a: 0x2217, # ASTERISK OPERATOR - 0x002d: 0x2212, # MINUS SIGN - 0x0040: 0x2245, # APPROXIMATELY EQUAL TO - 0x0041: 0x0391, # GREEK CAPITAL LETTER ALPHA - 0x0042: 0x0392, # GREEK CAPITAL LETTER BETA - 0x0043: 0x03a7, # GREEK CAPITAL LETTER CHI - 0x0044: 0x2206, # INCREMENT - 0x0045: 0x0395, # GREEK CAPITAL LETTER EPSILON - 0x0046: 0x03a6, # GREEK CAPITAL LETTER PHI - 0x0047: 0x0393, # GREEK CAPITAL LETTER GAMMA - 0x0048: 0x0397, # GREEK CAPITAL LETTER ETA - 0x0049: 0x0399, # GREEK CAPITAL LETTER IOTA - 0x004a: 0x03d1, # GREEK THETA SYMBOL - 0x004b: 0x039a, # GREEK CAPITAL LETTER KAPPA - 0x004c: 0x039b, # GREEK CAPITAL LETTER LAMDA - 0x004d: 0x039c, # GREEK CAPITAL LETTER MU - 0x004e: 0x039d, # GREEK CAPITAL LETTER NU - 0x004f: 0x039f, # GREEK CAPITAL LETTER OMICRON - 0x0050: 0x03a0, # GREEK CAPITAL LETTER PI - 0x0051: 0x0398, # GREEK CAPITAL LETTER THETA - 0x0052: 0x03a1, # GREEK CAPITAL LETTER RHO - 0x0053: 0x03a3, # GREEK CAPITAL LETTER SIGMA - 0x0054: 0x03a4, # GREEK CAPITAL LETTER TAU - 0x0055: 0x03a5, # GREEK CAPITAL LETTER UPSILON - 0x0056: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA - 0x0057: 0x2126, # OHM SIGN - 0x0058: 0x039e, # GREEK CAPITAL LETTER XI - 0x0059: 0x03a8, # GREEK CAPITAL LETTER PSI - 0x005a: 0x0396, # GREEK CAPITAL LETTER ZETA - 0x005c: 0x2234, # THEREFORE - 0x005e: 0x22a5, # UP TACK - 0x0060: 0xf8e5, # [unknown unicode name for radicalex] - 0x0061: 0x03b1, # GREEK SMALL LETTER ALPHA - 0x0062: 0x03b2, # GREEK SMALL LETTER BETA - 0x0063: 0x03c7, # GREEK SMALL LETTER CHI - 0x0064: 0x03b4, # GREEK SMALL LETTER DELTA - 0x0065: 0x03b5, # GREEK SMALL LETTER EPSILON - 0x0066: 0x03c6, # GREEK SMALL LETTER PHI - 0x0067: 0x03b3, # GREEK SMALL LETTER GAMMA - 0x0068: 0x03b7, # GREEK SMALL LETTER ETA - 0x0069: 0x03b9, # GREEK SMALL LETTER IOTA - 
0x006a: 0x03d5, # GREEK PHI SYMBOL - 0x006b: 0x03ba, # GREEK SMALL LETTER KAPPA - 0x006c: 0x03bb, # GREEK SMALL LETTER LAMDA - 0x006d: 0x00b5, # MICRO SIGN - 0x006e: 0x03bd, # GREEK SMALL LETTER NU - 0x006f: 0x03bf, # GREEK SMALL LETTER OMICRON - 0x0070: 0x03c0, # GREEK SMALL LETTER PI - 0x0071: 0x03b8, # GREEK SMALL LETTER THETA - 0x0072: 0x03c1, # GREEK SMALL LETTER RHO - 0x0073: 0x03c3, # GREEK SMALL LETTER SIGMA - 0x0074: 0x03c4, # GREEK SMALL LETTER TAU - 0x0075: 0x03c5, # GREEK SMALL LETTER UPSILON - 0x0076: 0x03d6, # GREEK PI SYMBOL - 0x0077: 0x03c9, # GREEK SMALL LETTER OMEGA - 0x0078: 0x03be, # GREEK SMALL LETTER XI - 0x0079: 0x03c8, # GREEK SMALL LETTER PSI - 0x007a: 0x03b6, # GREEK SMALL LETTER ZETA - 0x007e: 0x223c, # TILDE OPERATOR - 0x007f: None, # UNDEFINED - 0x0080: None, # UNDEFINED - 0x0081: None, # UNDEFINED - 0x0082: None, # UNDEFINED - 0x0083: None, # UNDEFINED - 0x0084: None, # UNDEFINED - 0x0085: None, # UNDEFINED - 0x0086: None, # UNDEFINED - 0x0087: None, # UNDEFINED - 0x0088: None, # UNDEFINED - 0x0089: None, # UNDEFINED - 0x008a: None, # UNDEFINED - 0x008b: None, # UNDEFINED - 0x008c: None, # UNDEFINED - 0x008d: None, # UNDEFINED - 0x008e: None, # UNDEFINED - 0x008f: None, # UNDEFINED - 0x0090: None, # UNDEFINED - 0x0091: None, # UNDEFINED - 0x0092: None, # UNDEFINED - 0x0093: None, # UNDEFINED - 0x0094: None, # UNDEFINED - 0x0095: None, # UNDEFINED - 0x0096: None, # UNDEFINED - 0x0097: None, # UNDEFINED - 0x0098: None, # UNDEFINED - 0x0099: None, # UNDEFINED - 0x009a: None, # UNDEFINED - 0x009b: None, # UNDEFINED - 0x009c: None, # UNDEFINED - 0x009d: None, # UNDEFINED - 0x009e: None, # UNDEFINED - 0x009f: None, # UNDEFINED - 0x00a0: 0x20ac, # EURO SIGN - 0x00a1: 0x03d2, # GREEK UPSILON WITH HOOK SYMBOL - 0x00a2: 0x2032, # PRIME - 0x00a3: 0x2264, # LESS-THAN OR EQUAL TO - 0x00a4: 0x2044, # FRACTION SLASH - 0x00a5: 0x221e, # INFINITY - 0x00a6: 0x0192, # LATIN SMALL LETTER F WITH HOOK - 0x00a7: 0x2663, # BLACK CLUB SUIT - 0x00a8: 0x2666, # BLACK DIAMOND SUIT - 0x00a9: 0x2665, # BLACK HEART SUIT - 0x00aa: 0x2660, # BLACK SPADE SUIT - 0x00ab: 0x2194, # LEFT RIGHT ARROW - 0x00ac: 0x2190, # LEFTWARDS ARROW - 0x00ad: 0x2191, # UPWARDS ARROW - 0x00ae: 0x2192, # RIGHTWARDS ARROW - 0x00af: 0x2193, # DOWNWARDS ARROW - 0x00b2: 0x2033, # DOUBLE PRIME - 0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO - 0x00b4: 0x00d7, # MULTIPLICATION SIGN - 0x00b5: 0x221d, # PROPORTIONAL TO - 0x00b6: 0x2202, # PARTIAL DIFFERENTIAL - 0x00b7: 0x2022, # BULLET - 0x00b8: 0x00f7, # DIVISION SIGN - 0x00b9: 0x2260, # NOT EQUAL TO - 0x00ba: 0x2261, # IDENTICAL TO - 0x00bb: 0x2248, # ALMOST EQUAL TO - 0x00bc: 0x2026, # HORIZONTAL ELLIPSIS - 0x00bd: 0xf8e6, # [unknown unicode name for arrowvertex] - 0x00be: 0xf8e7, # [unknown unicode name for arrowhorizex] - 0x00bf: 0x21b5, # DOWNWARDS ARROW WITH CORNER LEFTWARDS - 0x00c0: 0x2135, # ALEF SYMBOL - 0x00c1: 0x2111, # BLACK-LETTER CAPITAL I - 0x00c2: 0x211c, # BLACK-LETTER CAPITAL R - 0x00c3: 0x2118, # SCRIPT CAPITAL P - 0x00c4: 0x2297, # CIRCLED TIMES - 0x00c5: 0x2295, # CIRCLED PLUS - 0x00c6: 0x2205, # EMPTY SET - 0x00c7: 0x2229, # INTERSECTION - 0x00c8: 0x222a, # UNION - 0x00c9: 0x2283, # SUPERSET OF - 0x00ca: 0x2287, # SUPERSET OF OR EQUAL TO - 0x00cb: 0x2284, # NOT A SUBSET OF - 0x00cc: 0x2282, # SUBSET OF - 0x00cd: 0x2286, # SUBSET OF OR EQUAL TO - 0x00ce: 0x2208, # ELEMENT OF - 0x00cf: 0x2209, # NOT AN ELEMENT OF - 0x00d0: 0x2220, # ANGLE - 0x00d1: 0x2207, # NABLA - 0x00d2: 0xf6da, # [unknown unicode name for registerserif] - 0x00d3: 0xf6d9, # [unknown 
unicode name for copyrightserif] - 0x00d4: 0xf6db, # [unknown unicode name for trademarkserif] - 0x00d5: 0x220f, # N-ARY PRODUCT - 0x00d6: 0x221a, # SQUARE ROOT - 0x00d7: 0x22c5, # DOT OPERATOR - 0x00d8: 0x00ac, # NOT SIGN - 0x00d9: 0x2227, # LOGICAL AND - 0x00da: 0x2228, # LOGICAL OR - 0x00db: 0x21d4, # LEFT RIGHT DOUBLE ARROW - 0x00dc: 0x21d0, # LEFTWARDS DOUBLE ARROW - 0x00dd: 0x21d1, # UPWARDS DOUBLE ARROW - 0x00de: 0x21d2, # RIGHTWARDS DOUBLE ARROW - 0x00df: 0x21d3, # DOWNWARDS DOUBLE ARROW - 0x00e0: 0x25ca, # LOZENGE - 0x00e1: 0x2329, # LEFT-POINTING ANGLE BRACKET - 0x00e2: 0xf8e8, # [unknown unicode name for registersans] - 0x00e3: 0xf8e9, # [unknown unicode name for copyrightsans] - 0x00e4: 0xf8ea, # [unknown unicode name for trademarksans] - 0x00e5: 0x2211, # N-ARY SUMMATION - 0x00e6: 0xf8eb, # [unknown unicode name for parenlefttp] - 0x00e7: 0xf8ec, # [unknown unicode name for parenleftex] - 0x00e8: 0xf8ed, # [unknown unicode name for parenleftbt] - 0x00e9: 0xf8ee, # [unknown unicode name for bracketlefttp] - 0x00ea: 0xf8ef, # [unknown unicode name for bracketleftex] - 0x00eb: 0xf8f0, # [unknown unicode name for bracketleftbt] - 0x00ec: 0xf8f1, # [unknown unicode name for bracelefttp] - 0x00ed: 0xf8f2, # [unknown unicode name for braceleftmid] - 0x00ee: 0xf8f3, # [unknown unicode name for braceleftbt] - 0x00ef: 0xf8f4, # [unknown unicode name for braceex] - 0x00f0: None, # UNDEFINED - 0x00f1: 0x232a, # RIGHT-POINTING ANGLE BRACKET - 0x00f2: 0x222b, # INTEGRAL - 0x00f3: 0x2320, # TOP HALF INTEGRAL - 0x00f4: 0xf8f5, # [unknown unicode name for integralex] - 0x00f5: 0x2321, # BOTTOM HALF INTEGRAL - 0x00f6: 0xf8f6, # [unknown unicode name for parenrighttp] - 0x00f7: 0xf8f7, # [unknown unicode name for parenrightex] - 0x00f8: 0xf8f8, # [unknown unicode name for parenrightbt] - 0x00f9: 0xf8f9, # [unknown unicode name for bracketrighttp] - 0x00fa: 0xf8fa, # [unknown unicode name for bracketrightex] - 0x00fb: 0xf8fb, # [unknown unicode name for bracketrightbt] - 0x00fc: 0xf8fc, # [unknown unicode name for bracerighttp] - 0x00fd: 0xf8fd, # [unknown unicode name for bracerightmid] - 0x00fe: 0xf8fe, # [unknown unicode name for bracerightbt] - 0x00ff: None, # UNDEFINED - }, - { - 0x0394:0x0044, # GREEK CAPITAL LETTER DELTA - 0x03a9:0x0057, # GREEK CAPITAL LETTER OMEGA - 0x03bc:0x006d, # GREEK SMALL LETTER MU - } - ), - 'zapfdingbats':({ - 0x0021: 0x2701, # UPPER BLADE SCISSORS - 0x0022: 0x2702, # BLACK SCISSORS - 0x0023: 0x2703, # LOWER BLADE SCISSORS - 0x0024: 0x2704, # WHITE SCISSORS - 0x0025: 0x260e, # BLACK TELEPHONE - 0x0026: 0x2706, # TELEPHONE LOCATION SIGN - 0x0027: 0x2707, # TAPE DRIVE - 0x0028: 0x2708, # AIRPLANE - 0x0029: 0x2709, # ENVELOPE - 0x002a: 0x261b, # BLACK RIGHT POINTING INDEX - 0x002b: 0x261e, # WHITE RIGHT POINTING INDEX - 0x002c: 0x270c, # VICTORY HAND - 0x002d: 0x270d, # WRITING HAND - 0x002e: 0x270e, # LOWER RIGHT PENCIL - 0x002f: 0x270f, # PENCIL - 0x0030: 0x2710, # UPPER RIGHT PENCIL - 0x0031: 0x2711, # WHITE NIB - 0x0032: 0x2712, # BLACK NIB - 0x0033: 0x2713, # CHECK MARK - 0x0034: 0x2714, # HEAVY CHECK MARK - 0x0035: 0x2715, # MULTIPLICATION X - 0x0036: 0x2716, # HEAVY MULTIPLICATION X - 0x0037: 0x2717, # BALLOT X - 0x0038: 0x2718, # HEAVY BALLOT X - 0x0039: 0x2719, # OUTLINED GREEK CROSS - 0x003a: 0x271a, # HEAVY GREEK CROSS - 0x003b: 0x271b, # OPEN CENTRE CROSS - 0x003c: 0x271c, # HEAVY OPEN CENTRE CROSS - 0x003d: 0x271d, # LATIN CROSS - 0x003e: 0x271e, # SHADOWED WHITE LATIN CROSS - 0x003f: 0x271f, # OUTLINED LATIN CROSS - 0x0040: 0x2720, # MALTESE CROSS - 
0x0041: 0x2721, # STAR OF DAVID - 0x0042: 0x2722, # FOUR TEARDROP-SPOKED ASTERISK - 0x0043: 0x2723, # FOUR BALLOON-SPOKED ASTERISK - 0x0044: 0x2724, # HEAVY FOUR BALLOON-SPOKED ASTERISK - 0x0045: 0x2725, # FOUR CLUB-SPOKED ASTERISK - 0x0046: 0x2726, # BLACK FOUR POINTED STAR - 0x0047: 0x2727, # WHITE FOUR POINTED STAR - 0x0048: 0x2605, # BLACK STAR - 0x0049: 0x2729, # STRESS OUTLINED WHITE STAR - 0x004a: 0x272a, # CIRCLED WHITE STAR - 0x004b: 0x272b, # OPEN CENTRE BLACK STAR - 0x004c: 0x272c, # BLACK CENTRE WHITE STAR - 0x004d: 0x272d, # OUTLINED BLACK STAR - 0x004e: 0x272e, # HEAVY OUTLINED BLACK STAR - 0x004f: 0x272f, # PINWHEEL STAR - 0x0050: 0x2730, # SHADOWED WHITE STAR - 0x0051: 0x2731, # HEAVY ASTERISK - 0x0052: 0x2732, # OPEN CENTRE ASTERISK - 0x0053: 0x2733, # EIGHT SPOKED ASTERISK - 0x0054: 0x2734, # EIGHT POINTED BLACK STAR - 0x0055: 0x2735, # EIGHT POINTED PINWHEEL STAR - 0x0056: 0x2736, # SIX POINTED BLACK STAR - 0x0057: 0x2737, # EIGHT POINTED RECTILINEAR BLACK STAR - 0x0058: 0x2738, # HEAVY EIGHT POINTED RECTILINEAR BLACK STAR - 0x0059: 0x2739, # TWELVE POINTED BLACK STAR - 0x005a: 0x273a, # SIXTEEN POINTED ASTERISK - 0x005b: 0x273b, # TEARDROP-SPOKED ASTERISK - 0x005c: 0x273c, # OPEN CENTRE TEARDROP-SPOKED ASTERISK - 0x005d: 0x273d, # HEAVY TEARDROP-SPOKED ASTERISK - 0x005e: 0x273e, # SIX PETALLED BLACK AND WHITE FLORETTE - 0x005f: 0x273f, # BLACK FLORETTE - 0x0060: 0x2740, # WHITE FLORETTE - 0x0061: 0x2741, # EIGHT PETALLED OUTLINED BLACK FLORETTE - 0x0062: 0x2742, # CIRCLED OPEN CENTRE EIGHT POINTED STAR - 0x0063: 0x2743, # HEAVY TEARDROP-SPOKED PINWHEEL ASTERISK - 0x0064: 0x2744, # SNOWFLAKE - 0x0065: 0x2745, # TIGHT TRIFOLIATE SNOWFLAKE - 0x0066: 0x2746, # HEAVY CHEVRON SNOWFLAKE - 0x0067: 0x2747, # SPARKLE - 0x0068: 0x2748, # HEAVY SPARKLE - 0x0069: 0x2749, # BALLOON-SPOKED ASTERISK - 0x006a: 0x274a, # EIGHT TEARDROP-SPOKED PROPELLER ASTERISK - 0x006b: 0x274b, # HEAVY EIGHT TEARDROP-SPOKED PROPELLER ASTERISK - 0x006c: 0x25cf, # BLACK CIRCLE - 0x006d: 0x274d, # SHADOWED WHITE CIRCLE - 0x006e: 0x25a0, # BLACK SQUARE - 0x006f: 0x274f, # LOWER RIGHT DROP-SHADOWED WHITE SQUARE - 0x0070: 0x2750, # UPPER RIGHT DROP-SHADOWED WHITE SQUARE - 0x0071: 0x2751, # LOWER RIGHT SHADOWED WHITE SQUARE - 0x0072: 0x2752, # UPPER RIGHT SHADOWED WHITE SQUARE - 0x0073: 0x25b2, # BLACK UP-POINTING TRIANGLE - 0x0074: 0x25bc, # BLACK DOWN-POINTING TRIANGLE - 0x0075: 0x25c6, # BLACK DIAMOND - 0x0076: 0x2756, # BLACK DIAMOND MINUS WHITE X - 0x0077: 0x25d7, # RIGHT HALF BLACK CIRCLE - 0x0078: 0x2758, # LIGHT VERTICAL BAR - 0x0079: 0x2759, # MEDIUM VERTICAL BAR - 0x007a: 0x275a, # HEAVY VERTICAL BAR - 0x007b: 0x275b, # HEAVY SINGLE TURNED COMMA QUOTATION MARK ORNAMENT - 0x007c: 0x275c, # HEAVY SINGLE COMMA QUOTATION MARK ORNAMENT - 0x007d: 0x275d, # HEAVY DOUBLE TURNED COMMA QUOTATION MARK ORNAMENT - 0x007e: 0x275e, # HEAVY DOUBLE COMMA QUOTATION MARK ORNAMENT - 0x007f: None, # UNDEFINED - 0x0080: 0x2768, # MEDIUM LEFT PARENTHESIS ORNAMENT - 0x0081: 0x2769, # MEDIUM RIGHT PARENTHESIS ORNAMENT - 0x0082: 0x276a, # MEDIUM FLATTENED LEFT PARENTHESIS ORNAMENT - 0x0083: 0x276b, # MEDIUM FLATTENED RIGHT PARENTHESIS ORNAMENT - 0x0084: 0x276c, # MEDIUM LEFT-POINTING ANGLE BRACKET ORNAMENT - 0x0085: 0x276d, # MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT - 0x0086: 0x276e, # HEAVY LEFT-POINTING ANGLE QUOTATION MARK ORNAMENT - 0x0087: 0x276f, # HEAVY RIGHT-POINTING ANGLE QUOTATION MARK ORNAMENT - 0x0088: 0x2770, # HEAVY LEFT-POINTING ANGLE BRACKET ORNAMENT - 0x0089: 0x2771, # HEAVY RIGHT-POINTING ANGLE BRACKET 
ORNAMENT - 0x008a: 0x2772, # LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT - 0x008b: 0x2773, # LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT - 0x008c: 0x2774, # MEDIUM LEFT CURLY BRACKET ORNAMENT - 0x008d: 0x2775, # MEDIUM RIGHT CURLY BRACKET ORNAMENT - 0x008e: None, # UNDEFINED - 0x008f: None, # UNDEFINED - 0x0090: None, # UNDEFINED - 0x0091: None, # UNDEFINED - 0x0092: None, # UNDEFINED - 0x0093: None, # UNDEFINED - 0x0094: None, # UNDEFINED - 0x0095: None, # UNDEFINED - 0x0096: None, # UNDEFINED - 0x0097: None, # UNDEFINED - 0x0098: None, # UNDEFINED - 0x0099: None, # UNDEFINED - 0x009a: None, # UNDEFINED - 0x009b: None, # UNDEFINED - 0x009c: None, # UNDEFINED - 0x009d: None, # UNDEFINED - 0x009e: None, # UNDEFINED - 0x009f: None, # UNDEFINED - 0x00a0: None, # UNDEFINED - 0x00a1: 0x2761, # CURVED STEM PARAGRAPH SIGN ORNAMENT - 0x00a2: 0x2762, # HEAVY EXCLAMATION MARK ORNAMENT - 0x00a3: 0x2763, # HEAVY HEART EXCLAMATION MARK ORNAMENT - 0x00a4: 0x2764, # HEAVY BLACK HEART - 0x00a5: 0x2765, # ROTATED HEAVY BLACK HEART BULLET - 0x00a6: 0x2766, # FLORAL HEART - 0x00a7: 0x2767, # ROTATED FLORAL HEART BULLET - 0x00a8: 0x2663, # BLACK CLUB SUIT - 0x00a9: 0x2666, # BLACK DIAMOND SUIT - 0x00aa: 0x2665, # BLACK HEART SUIT - 0x00ab: 0x2660, # BLACK SPADE SUIT - 0x00ac: 0x2460, # CIRCLED DIGIT ONE - 0x00ad: 0x2461, # CIRCLED DIGIT TWO - 0x00ae: 0x2462, # CIRCLED DIGIT THREE - 0x00af: 0x2463, # CIRCLED DIGIT FOUR - 0x00b0: 0x2464, # CIRCLED DIGIT FIVE - 0x00b1: 0x2465, # CIRCLED DIGIT SIX - 0x00b2: 0x2466, # CIRCLED DIGIT SEVEN - 0x00b3: 0x2467, # CIRCLED DIGIT EIGHT - 0x00b4: 0x2468, # CIRCLED DIGIT NINE - 0x00b5: 0x2469, # CIRCLED NUMBER TEN - 0x00b6: 0x2776, # DINGBAT NEGATIVE CIRCLED DIGIT ONE - 0x00b7: 0x2777, # DINGBAT NEGATIVE CIRCLED DIGIT TWO - 0x00b8: 0x2778, # DINGBAT NEGATIVE CIRCLED DIGIT THREE - 0x00b9: 0x2779, # DINGBAT NEGATIVE CIRCLED DIGIT FOUR - 0x00ba: 0x277a, # DINGBAT NEGATIVE CIRCLED DIGIT FIVE - 0x00bb: 0x277b, # DINGBAT NEGATIVE CIRCLED DIGIT SIX - 0x00bc: 0x277c, # DINGBAT NEGATIVE CIRCLED DIGIT SEVEN - 0x00bd: 0x277d, # DINGBAT NEGATIVE CIRCLED DIGIT EIGHT - 0x00be: 0x277e, # DINGBAT NEGATIVE CIRCLED DIGIT NINE - 0x00bf: 0x277f, # DINGBAT NEGATIVE CIRCLED NUMBER TEN - 0x00c0: 0x2780, # DINGBAT CIRCLED SANS-SERIF DIGIT ONE - 0x00c1: 0x2781, # DINGBAT CIRCLED SANS-SERIF DIGIT TWO - 0x00c2: 0x2782, # DINGBAT CIRCLED SANS-SERIF DIGIT THREE - 0x00c3: 0x2783, # DINGBAT CIRCLED SANS-SERIF DIGIT FOUR - 0x00c4: 0x2784, # DINGBAT CIRCLED SANS-SERIF DIGIT FIVE - 0x00c5: 0x2785, # DINGBAT CIRCLED SANS-SERIF DIGIT SIX - 0x00c6: 0x2786, # DINGBAT CIRCLED SANS-SERIF DIGIT SEVEN - 0x00c7: 0x2787, # DINGBAT CIRCLED SANS-SERIF DIGIT EIGHT - 0x00c8: 0x2788, # DINGBAT CIRCLED SANS-SERIF DIGIT NINE - 0x00c9: 0x2789, # DINGBAT CIRCLED SANS-SERIF NUMBER TEN - 0x00ca: 0x278a, # DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ONE - 0x00cb: 0x278b, # DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT TWO - 0x00cc: 0x278c, # DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT THREE - 0x00cd: 0x278d, # DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT FOUR - 0x00ce: 0x278e, # DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT FIVE - 0x00cf: 0x278f, # DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT SIX - 0x00d0: 0x2790, # DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT SEVEN - 0x00d1: 0x2791, # DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT EIGHT - 0x00d2: 0x2792, # DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT NINE - 0x00d3: 0x2793, # DINGBAT NEGATIVE CIRCLED SANS-SERIF NUMBER TEN - 0x00d4: 0x2794, # HEAVY WIDE-HEADED RIGHTWARDS ARROW - 0x00d5: 0x2192, # RIGHTWARDS 
ARROW - 0x00d6: 0x2194, # LEFT RIGHT ARROW - 0x00d7: 0x2195, # UP DOWN ARROW - 0x00d8: 0x2798, # HEAVY SOUTH EAST ARROW - 0x00d9: 0x2799, # HEAVY RIGHTWARDS ARROW - 0x00da: 0x279a, # HEAVY NORTH EAST ARROW - 0x00db: 0x279b, # DRAFTING POINT RIGHTWARDS ARROW - 0x00dc: 0x279c, # HEAVY ROUND-TIPPED RIGHTWARDS ARROW - 0x00dd: 0x279d, # TRIANGLE-HEADED RIGHTWARDS ARROW - 0x00de: 0x279e, # HEAVY TRIANGLE-HEADED RIGHTWARDS ARROW - 0x00df: 0x279f, # DASHED TRIANGLE-HEADED RIGHTWARDS ARROW - 0x00e0: 0x27a0, # HEAVY DASHED TRIANGLE-HEADED RIGHTWARDS ARROW - 0x00e1: 0x27a1, # BLACK RIGHTWARDS ARROW - 0x00e2: 0x27a2, # THREE-D TOP-LIGHTED RIGHTWARDS ARROWHEAD - 0x00e3: 0x27a3, # THREE-D BOTTOM-LIGHTED RIGHTWARDS ARROWHEAD - 0x00e4: 0x27a4, # BLACK RIGHTWARDS ARROWHEAD - 0x00e5: 0x27a5, # HEAVY BLACK CURVED DOWNWARDS AND RIGHTWARDS ARROW - 0x00e6: 0x27a6, # HEAVY BLACK CURVED UPWARDS AND RIGHTWARDS ARROW - 0x00e7: 0x27a7, # SQUAT BLACK RIGHTWARDS ARROW - 0x00e8: 0x27a8, # HEAVY CONCAVE-POINTED BLACK RIGHTWARDS ARROW - 0x00e9: 0x27a9, # RIGHT-SHADED WHITE RIGHTWARDS ARROW - 0x00ea: 0x27aa, # LEFT-SHADED WHITE RIGHTWARDS ARROW - 0x00eb: 0x27ab, # BACK-TILTED SHADOWED WHITE RIGHTWARDS ARROW - 0x00ec: 0x27ac, # FRONT-TILTED SHADOWED WHITE RIGHTWARDS ARROW - 0x00ed: 0x27ad, # HEAVY LOWER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW - 0x00ee: 0x27ae, # HEAVY UPPER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW - 0x00ef: 0x27af, # NOTCHED LOWER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW - 0x00f0: None, # UNDEFINED - 0x00f1: 0x27b1, # NOTCHED UPPER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW - 0x00f2: 0x27b2, # CIRCLED HEAVY WHITE RIGHTWARDS ARROW - 0x00f3: 0x27b3, # WHITE-FEATHERED RIGHTWARDS ARROW - 0x00f4: 0x27b4, # BLACK-FEATHERED SOUTH EAST ARROW - 0x00f5: 0x27b5, # BLACK-FEATHERED RIGHTWARDS ARROW - 0x00f6: 0x27b6, # BLACK-FEATHERED NORTH EAST ARROW - 0x00f7: 0x27b7, # HEAVY BLACK-FEATHERED SOUTH EAST ARROW - 0x00f8: 0x27b8, # HEAVY BLACK-FEATHERED RIGHTWARDS ARROW - 0x00f9: 0x27b9, # HEAVY BLACK-FEATHERED NORTH EAST ARROW - 0x00fa: 0x27ba, # TEARDROP-BARBED RIGHTWARDS ARROW - 0x00fb: 0x27bb, # HEAVY TEARDROP-SHANKED RIGHTWARDS ARROW - 0x00fc: 0x27bc, # WEDGE-TAILED RIGHTWARDS ARROW - 0x00fd: 0x27bd, # HEAVY WEDGE-TAILED RIGHTWARDS ARROW - 0x00fe: 0x27be, # OPEN-OUTLINED RIGHTWARDS ARROW - 0x00ff: None, # UNDEFINED - },None), - 'pdfdoc':({ - 0x007f: None, # UNDEFINED - 0x0080: 0x2022, # BULLET - 0x0081: 0x2020, # DAGGER - 0x0082: 0x2021, # DOUBLE DAGGER - 0x0083: 0x2026, # HORIZONTAL ELLIPSIS - 0x0084: 0x2014, # EM DASH - 0x0085: 0x2013, # EN DASH - 0x0086: 0x0192, # LATIN SMALL LETTER F WITH HOOK - 0x0087: 0x2044, # FRACTION SLASH - 0x0088: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK - 0x0089: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK - 0x008a: 0x2212, # MINUS SIGN - 0x008b: 0x2030, # PER MILLE SIGN - 0x008c: 0x201e, # DOUBLE LOW-9 QUOTATION MARK - 0x008d: 0x201c, # LEFT DOUBLE QUOTATION MARK - 0x008e: 0x201d, # RIGHT DOUBLE QUOTATION MARK - 0x008f: 0x2018, # LEFT SINGLE QUOTATION MARK - 0x0090: 0x2019, # RIGHT SINGLE QUOTATION MARK - 0x0091: 0x201a, # SINGLE LOW-9 QUOTATION MARK - 0x0092: 0x2122, # TRADE MARK SIGN - 0x0093: 0xfb01, # LATIN SMALL LIGATURE FI - 0x0094: 0xfb02, # LATIN SMALL LIGATURE FL - 0x0095: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE - 0x0096: 0x0152, # LATIN CAPITAL LIGATURE OE - 0x0097: 0x0160, # LATIN CAPITAL LETTER S WITH CARON - 0x0098: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS - 0x0099: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON - 0x009a: 0x0131, # LATIN SMALL LETTER DOTLESS I 
- 0x009b: 0x0142, # LATIN SMALL LETTER L WITH STROKE - 0x009c: 0x0153, # LATIN SMALL LIGATURE OE - 0x009d: 0x0161, # LATIN SMALL LETTER S WITH CARON - 0x009e: 0x017e, # LATIN SMALL LETTER Z WITH CARON - 0x009f: None, # UNDEFINED - 0x00a0: 0x20ac, # EURO SIGN - 0x00ad: None, # UNDEFINED - 24: 0x02d8, #breve - 25: 0x02c7, #caron - 26: 0x02c6, #circumflex - 27: 0x02d9, #dotaccent - 28: 0x02dd, #hungarumlaut - 29: 0x02db, #ogonek - 30: 0x02da, #ring - 31: 0x02dc, #tilde - },None), - 'macexpert':({ - 0x0021: 0xf721, # [unknown unicode name for exclamsmall] - 0x0022: 0xf6f8, # [unknown unicode name for Hungarumlautsmall] - 0x0023: 0xf7a2, # [unknown unicode name for centoldstyle] - 0x0024: 0xf724, # [unknown unicode name for dollaroldstyle] - 0x0025: 0xf6e4, # [unknown unicode name for dollarsuperior] - 0x0026: 0xf726, # [unknown unicode name for ampersandsmall] - 0x0027: 0xf7b4, # [unknown unicode name for Acutesmall] - 0x0028: 0x207d, # SUPERSCRIPT LEFT PARENTHESIS - 0x0029: 0x207e, # SUPERSCRIPT RIGHT PARENTHESIS - 0x002a: 0x2025, # TWO DOT LEADER - 0x002b: 0x2024, # ONE DOT LEADER - 0x002f: 0x2044, # FRACTION SLASH - 0x0030: 0xf730, # [unknown unicode name for zerooldstyle] - 0x0031: 0xf731, # [unknown unicode name for oneoldstyle] - 0x0032: 0xf732, # [unknown unicode name for twooldstyle] - 0x0033: 0xf733, # [unknown unicode name for threeoldstyle] - 0x0034: 0xf734, # [unknown unicode name for fouroldstyle] - 0x0035: 0xf735, # [unknown unicode name for fiveoldstyle] - 0x0036: 0xf736, # [unknown unicode name for sixoldstyle] - 0x0037: 0xf737, # [unknown unicode name for sevenoldstyle] - 0x0038: 0xf738, # [unknown unicode name for eightoldstyle] - 0x0039: 0xf739, # [unknown unicode name for nineoldstyle] - 0x003c: None, # UNDEFINED - 0x003d: 0xf6de, # [unknown unicode name for threequartersemdash] - 0x003e: None, # UNDEFINED - 0x003f: 0xf73f, # [unknown unicode name for questionsmall] - 0x0040: None, # UNDEFINED - 0x0041: None, # UNDEFINED - 0x0042: None, # UNDEFINED - 0x0043: None, # UNDEFINED - 0x0044: 0xf7f0, # [unknown unicode name for Ethsmall] - 0x0045: None, # UNDEFINED - 0x0046: None, # UNDEFINED - 0x0047: 0x00bc, # VULGAR FRACTION ONE QUARTER - 0x0048: 0x00bd, # VULGAR FRACTION ONE HALF - 0x0049: 0x00be, # VULGAR FRACTION THREE QUARTERS - 0x004a: 0x215b, # VULGAR FRACTION ONE EIGHTH - 0x004b: 0x215c, # VULGAR FRACTION THREE EIGHTHS - 0x004c: 0x215d, # VULGAR FRACTION FIVE EIGHTHS - 0x004d: 0x215e, # VULGAR FRACTION SEVEN EIGHTHS - 0x004e: 0x2153, # VULGAR FRACTION ONE THIRD - 0x004f: 0x2154, # VULGAR FRACTION TWO THIRDS - 0x0050: None, # UNDEFINED - 0x0051: None, # UNDEFINED - 0x0052: None, # UNDEFINED - 0x0053: None, # UNDEFINED - 0x0054: None, # UNDEFINED - 0x0055: None, # UNDEFINED - 0x0056: 0xfb00, # LATIN SMALL LIGATURE FF - 0x0057: 0xfb01, # LATIN SMALL LIGATURE FI - 0x0058: 0xfb02, # LATIN SMALL LIGATURE FL - 0x0059: 0xfb03, # LATIN SMALL LIGATURE FFI - 0x005a: 0xfb04, # LATIN SMALL LIGATURE FFL - 0x005b: 0x208d, # SUBSCRIPT LEFT PARENTHESIS - 0x005c: None, # UNDEFINED - 0x005d: 0x208e, # SUBSCRIPT RIGHT PARENTHESIS - 0x005e: 0xf6f6, # [unknown unicode name for Circumflexsmall] - 0x005f: 0xf6e5, # [unknown unicode name for hypheninferior] - 0x0060: 0xf760, # [unknown unicode name for Gravesmall] - 0x0061: 0xf761, # [unknown unicode name for Asmall] - 0x0062: 0xf762, # [unknown unicode name for Bsmall] - 0x0063: 0xf763, # [unknown unicode name for Csmall] - 0x0064: 0xf764, # [unknown unicode name for Dsmall] - 0x0065: 0xf765, # [unknown unicode name for Esmall] - 0x0066: 
0xf766, # [unknown unicode name for Fsmall] - 0x0067: 0xf767, # [unknown unicode name for Gsmall] - 0x0068: 0xf768, # [unknown unicode name for Hsmall] - 0x0069: 0xf769, # [unknown unicode name for Ismall] - 0x006a: 0xf76a, # [unknown unicode name for Jsmall] - 0x006b: 0xf76b, # [unknown unicode name for Ksmall] - 0x006c: 0xf76c, # [unknown unicode name for Lsmall] - 0x006d: 0xf76d, # [unknown unicode name for Msmall] - 0x006e: 0xf76e, # [unknown unicode name for Nsmall] - 0x006f: 0xf76f, # [unknown unicode name for Osmall] - 0x0070: 0xf770, # [unknown unicode name for Psmall] - 0x0071: 0xf771, # [unknown unicode name for Qsmall] - 0x0072: 0xf772, # [unknown unicode name for Rsmall] - 0x0073: 0xf773, # [unknown unicode name for Ssmall] - 0x0074: 0xf774, # [unknown unicode name for Tsmall] - 0x0075: 0xf775, # [unknown unicode name for Usmall] - 0x0076: 0xf776, # [unknown unicode name for Vsmall] - 0x0077: 0xf777, # [unknown unicode name for Wsmall] - 0x0078: 0xf778, # [unknown unicode name for Xsmall] - 0x0079: 0xf779, # [unknown unicode name for Ysmall] - 0x007a: 0xf77a, # [unknown unicode name for Zsmall] - 0x007b: 0x20a1, # COLON SIGN - 0x007c: 0xf6dc, # [unknown unicode name for onefitted] - 0x007d: 0xf6dd, # [unknown unicode name for rupiah] - 0x007e: 0xf6fe, # [unknown unicode name for Tildesmall] - 0x007f: None, # UNDEFINED - 0x0080: None, # UNDEFINED - 0x0081: 0xf6e9, # [unknown unicode name for asuperior] - 0x0082: 0xf6e0, # [unknown unicode name for centsuperior] - 0x0083: None, # UNDEFINED - 0x0084: None, # UNDEFINED - 0x0085: None, # UNDEFINED - 0x0086: None, # UNDEFINED - 0x0087: 0xf7e1, # [unknown unicode name for Aacutesmall] - 0x0088: 0xf7e0, # [unknown unicode name for Agravesmall] - 0x0089: 0xf7e2, # [unknown unicode name for Acircumflexsmall] - 0x008a: 0xf7e4, # [unknown unicode name for Adieresissmall] - 0x008b: 0xf7e3, # [unknown unicode name for Atildesmall] - 0x008c: 0xf7e5, # [unknown unicode name for Aringsmall] - 0x008d: 0xf7e7, # [unknown unicode name for Ccedillasmall] - 0x008e: 0xf7e9, # [unknown unicode name for Eacutesmall] - 0x008f: 0xf7e8, # [unknown unicode name for Egravesmall] - 0x0090: 0xf7ea, # [unknown unicode name for Ecircumflexsmall] - 0x0091: 0xf7eb, # [unknown unicode name for Edieresissmall] - 0x0092: 0xf7ed, # [unknown unicode name for Iacutesmall] - 0x0093: 0xf7ec, # [unknown unicode name for Igravesmall] - 0x0094: 0xf7ee, # [unknown unicode name for Icircumflexsmall] - 0x0095: 0xf7ef, # [unknown unicode name for Idieresissmall] - 0x0096: 0xf7f1, # [unknown unicode name for Ntildesmall] - 0x0097: 0xf7f3, # [unknown unicode name for Oacutesmall] - 0x0098: 0xf7f2, # [unknown unicode name for Ogravesmall] - 0x0099: 0xf7f4, # [unknown unicode name for Ocircumflexsmall] - 0x009a: 0xf7f6, # [unknown unicode name for Odieresissmall] - 0x009b: 0xf7f5, # [unknown unicode name for Otildesmall] - 0x009c: 0xf7fa, # [unknown unicode name for Uacutesmall] - 0x009d: 0xf7f9, # [unknown unicode name for Ugravesmall] - 0x009e: 0xf7fb, # [unknown unicode name for Ucircumflexsmall] - 0x009f: 0xf7fc, # [unknown unicode name for Udieresissmall] - 0x00a0: None, # UNDEFINED - 0x00a1: 0x2078, # SUPERSCRIPT EIGHT - 0x00a2: 0x2084, # SUBSCRIPT FOUR - 0x00a3: 0x2083, # SUBSCRIPT THREE - 0x00a4: 0x2086, # SUBSCRIPT SIX - 0x00a5: 0x2088, # SUBSCRIPT EIGHT - 0x00a6: 0x2087, # SUBSCRIPT SEVEN - 0x00a7: 0xf6fd, # [unknown unicode name for Scaronsmall] - 0x00a8: None, # UNDEFINED - 0x00a9: 0xf6df, # [unknown unicode name for centinferior] - 0x00aa: 0x2082, # SUBSCRIPT TWO - 
0x00ab: None, # UNDEFINED - 0x00ac: 0xf7a8, # [unknown unicode name for Dieresissmall] - 0x00ad: None, # UNDEFINED - 0x00ae: 0xf6f5, # [unknown unicode name for Caronsmall] - 0x00af: 0xf6f0, # [unknown unicode name for osuperior] - 0x00b0: 0x2085, # SUBSCRIPT FIVE - 0x00b1: None, # UNDEFINED - 0x00b2: 0xf6e1, # [unknown unicode name for commainferior] - 0x00b3: 0xf6e7, # [unknown unicode name for periodinferior] - 0x00b4: 0xf7fd, # [unknown unicode name for Yacutesmall] - 0x00b5: None, # UNDEFINED - 0x00b6: 0xf6e3, # [unknown unicode name for dollarinferior] - 0x00b7: None, # UNDEFINED - 0x00b8: None, # UNDEFINED - 0x00b9: 0xf7fe, # [unknown unicode name for Thornsmall] - 0x00ba: None, # UNDEFINED - 0x00bb: 0x2089, # SUBSCRIPT NINE - 0x00bc: 0x2080, # SUBSCRIPT ZERO - 0x00bd: 0xf6ff, # [unknown unicode name for Zcaronsmall] - 0x00be: 0xf7e6, # [unknown unicode name for AEsmall] - 0x00bf: 0xf7f8, # [unknown unicode name for Oslashsmall] - 0x00c0: 0xf7bf, # [unknown unicode name for questiondownsmall] - 0x00c1: 0x2081, # SUBSCRIPT ONE - 0x00c2: 0xf6f9, # [unknown unicode name for Lslashsmall] - 0x00c3: None, # UNDEFINED - 0x00c4: None, # UNDEFINED - 0x00c5: None, # UNDEFINED - 0x00c6: None, # UNDEFINED - 0x00c7: None, # UNDEFINED - 0x00c8: None, # UNDEFINED - 0x00c9: 0xf7b8, # [unknown unicode name for Cedillasmall] - 0x00ca: None, # UNDEFINED - 0x00cb: None, # UNDEFINED - 0x00cc: None, # UNDEFINED - 0x00cd: None, # UNDEFINED - 0x00ce: None, # UNDEFINED - 0x00cf: 0xf6fa, # [unknown unicode name for OEsmall] - 0x00d0: 0x2012, # FIGURE DASH - 0x00d1: 0xf6e6, # [unknown unicode name for hyphensuperior] - 0x00d2: None, # UNDEFINED - 0x00d3: None, # UNDEFINED - 0x00d4: None, # UNDEFINED - 0x00d5: None, # UNDEFINED - 0x00d6: 0xf7a1, # [unknown unicode name for exclamdownsmall] - 0x00d7: None, # UNDEFINED - 0x00d8: 0xf7ff, # [unknown unicode name for Ydieresissmall] - 0x00d9: None, # UNDEFINED - 0x00da: 0x00b9, # SUPERSCRIPT ONE - 0x00db: 0x00b2, # SUPERSCRIPT TWO - 0x00dc: 0x00b3, # SUPERSCRIPT THREE - 0x00dd: 0x2074, # SUPERSCRIPT FOUR - 0x00de: 0x2075, # SUPERSCRIPT FIVE - 0x00df: 0x2076, # SUPERSCRIPT SIX - 0x00e0: 0x2077, # SUPERSCRIPT SEVEN - 0x00e1: 0x2079, # SUPERSCRIPT NINE - 0x00e2: 0x2070, # SUPERSCRIPT ZERO - 0x00e3: None, # UNDEFINED - 0x00e4: 0xf6ec, # [unknown unicode name for esuperior] - 0x00e5: 0xf6f1, # [unknown unicode name for rsuperior] - 0x00e6: 0xf6f3, # [unknown unicode name for tsuperior] - 0x00e7: None, # UNDEFINED - 0x00e8: None, # UNDEFINED - 0x00e9: 0xf6ed, # [unknown unicode name for isuperior] - 0x00ea: 0xf6f2, # [unknown unicode name for ssuperior] - 0x00eb: 0xf6eb, # [unknown unicode name for dsuperior] - 0x00ec: None, # UNDEFINED - 0x00ed: None, # UNDEFINED - 0x00ee: None, # UNDEFINED - 0x00ef: None, # UNDEFINED - 0x00f0: None, # UNDEFINED - 0x00f1: 0xf6ee, # [unknown unicode name for lsuperior] - 0x00f2: 0xf6fb, # [unknown unicode name for Ogoneksmall] - 0x00f3: 0xf6f4, # [unknown unicode name for Brevesmall] - 0x00f4: 0xf7af, # [unknown unicode name for Macronsmall] - 0x00f5: 0xf6ea, # [unknown unicode name for bsuperior] - 0x00f6: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N - 0x00f7: 0xf6ef, # [unknown unicode name for msuperior] - 0x00f8: 0xf6e2, # [unknown unicode name for commasuperior] - 0x00f9: 0xf6e8, # [unknown unicode name for periodsuperior] - 0x00fa: 0xf6f7, # [unknown unicode name for Dotaccentsmall] - 0x00fb: 0xf6fc, # [unknown unicode name for Ringsmall] - 0x00fc: None, # UNDEFINED - 0x00fd: None, # UNDEFINED - 0x00fe: None, # UNDEFINED - 0x00ff: 
None, # UNDEFINED - },None), - } - #for k,v in __rl_codecs_data.items(): - # __rl_codecs_data[k+'enc'] = __rl_codecs_data[k+'encoding'] = v - #del k,v - - def __init__(self): - raise NotImplementedError - - def _256_exception_codec((exceptions,rexceptions)): - import codecs - decoding_map = codecs.make_identity_dict(xrange(32,256)) - decoding_map.update(exceptions) - encoding_map = codecs.make_encoding_map(decoding_map) - if rexceptions: encoding_map.update(rexceptions) - ### Codec APIs - class Codec(codecs.Codec): - def encode(self,input,errors='strict',charmap_encode=codecs.charmap_encode,encoding_map=encoding_map): - return charmap_encode(input,errors,encoding_map) - - def decode(self,input,errors='strict',charmap_decode=codecs.charmap_decode,decoding_map=decoding_map): - return charmap_decode(input,errors,decoding_map) - - class StreamWriter(Codec,codecs.StreamWriter): - pass - - class StreamReader(Codec,codecs.StreamReader): - pass - C = Codec() - return (C.encode,C.decode,StreamReader,StreamWriter) - _256_exception_codec=staticmethod(_256_exception_codec) - - __rl_codecs_cache = {} - - def __rl_codecs(name,cache=__rl_codecs_cache,data=__rl_codecs_data): - try: - return cache[name] - except KeyError: - cache[name] = c = RL_Codecs._256_exception_codec( - data[name]) - return c - __rl_codecs=staticmethod(__rl_codecs) - - def _rl_codecs(name): - name = name.lower() - from pdfmetrics import standardEncodings - for e in standardEncodings: - e = e[:-8].lower() - if name.startswith(e): return RL_Codecs.__rl_codecs(e) - return None - _rl_codecs=staticmethod(_rl_codecs) - - def register(): - import codecs - codecs.register(RL_Codecs._rl_codecs) - register=staticmethod(register) diff --git a/dist-packages/wordaxe/wordaxe/rl/styles.py b/dist-packages/wordaxe/wordaxe/rl/styles.py deleted file mode 100755 index 996df497c..000000000 --- a/dist-packages/wordaxe/wordaxe/rl/styles.py +++ /dev/null @@ -1,118 +0,0 @@ -#copyright ReportLab Inc. 2000 -#see license.txt for license details -#history http://cvs.sourceforge.net/cgi-bin/cvsweb.cgi/reportlab/lib/styles.py?cvsroot=reportlab -#$Header: /cvsroot/deco-cow/hyphenation/reportlab/lib/styles.py,v 1.1.1.1 2004/04/27 21:18:54 hvbargen Exp $ -__version__=''' $Id: styles.py,v 1.1.1.1 2004/04/27 21:18:54 hvbargen Exp $ ''' - -from reportlab.lib.colors import white, black -from reportlab.lib.enums import TA_LEFT, TA_CENTER - -from reportlab.lib.styles import * - -_orig_ParagraphStyle = ParagraphStyle -class ParagraphStyle(_orig_ParagraphStyle): - defaults = dict(_orig_ParagraphStyle.defaults.items()) - defaults.update ({ - 'language':None, - 'hyphenation':False, - 'backColor':None, - 'kerning':False, - }) - -# From here on, the rest is copied from the original styles.py. -# We have to do it this way, otherwise the additional default -# values for language and hyphenation will not be contained -# in the sample stylesheet. 
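# Illustrative sketch, not from the original module: the point of the subclass
# above is that the extra default keys become ordinary style attributes, so a
# caller could have enabled the wordaxe features per style, for example:
#
#   styles = getSampleStyleSheet()
#   body = ParagraphStyle('HyphenatedBody',
#                         parent=styles['BodyText'],
#                         language='DE',      # whatever code the installed hyphenator uses
#                         hyphenation=True,   # let wordaxe break long words
#                         kerning=True)       # use the TrueType kern table
#
# The attribute names are exactly the defaults added above; everything else is
# stock ReportLab.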
-
-def testStyles():
-    pNormal = ParagraphStyle('Normal',None)
-    pNormal.fontName = 'Times-Roman'
-    pNormal.fontSize = 12
-    pNormal.leading = 14.4
-
-    pNormal.listAttrs()
-    print
-    pPre = ParagraphStyle('Literal', pNormal)
-    pPre.fontName = 'Courier'
-    pPre.listAttrs()
-    return pNormal, pPre
-
-def getSampleStyleSheet():
-    """Returns a stylesheet object"""
-    stylesheet = StyleSheet1()
-
-    stylesheet.add(ParagraphStyle(name='Normal',
-                                  fontName='Times-Roman',
-                                  fontSize=10,
-                                  leading=12)
-                   )
-
-    stylesheet.add(ParagraphStyle(name='BodyText',
-                                  parent=stylesheet['Normal'],
-                                  spaceBefore=6)
-                   )
-    stylesheet.add(ParagraphStyle(name='Italic',
-                                  parent=stylesheet['BodyText'],
-                                  fontName = 'Times-Italic')
-                   )
-
-    stylesheet.add(ParagraphStyle(name='Heading1',
-                                  parent=stylesheet['Normal'],
-                                  fontName = 'Times-Bold',
-                                  fontSize=18,
-                                  leading=22,
-                                  spaceAfter=6),
-                   alias='h1')
-
-    stylesheet.add(ParagraphStyle(name='Title',
-                                  parent=stylesheet['Normal'],
-                                  fontName = 'Times-Bold',
-                                  fontSize=18,
-                                  leading=22,
-                                  alignment=TA_CENTER,
-                                  spaceAfter=6),
-                   alias='title')
-
-    stylesheet.add(ParagraphStyle(name='Heading2',
-                                  parent=stylesheet['Normal'],
-                                  fontName = 'Times-Bold',
-                                  fontSize=14,
-                                  leading=18,
-                                  spaceBefore=12,
-                                  spaceAfter=6),
-                   alias='h2')
-
-    stylesheet.add(ParagraphStyle(name='Heading3',
-                                  parent=stylesheet['Normal'],
-                                  fontName = 'Times-BoldItalic',
-                                  fontSize=12,
-                                  leading=14,
-                                  spaceBefore=12,
-                                  spaceAfter=6),
-                   alias='h3')
-
-    stylesheet.add(ParagraphStyle(name='Bullet',
-                                  parent=stylesheet['Normal'],
-                                  firstLineIndent=0,
-                                  spaceBefore=3),
-                   alias='bu')
-
-    stylesheet.add(ParagraphStyle(name='Definition',
-                                  parent=stylesheet['Normal'],
-                                  firstLineIndent=0,
-                                  leftIndent=36,
-                                  bulletIndent=0,
-                                  spaceBefore=6,
-                                  bulletFontName='Times-BoldItalic'),
-                   alias='df')
-
-    stylesheet.add(ParagraphStyle(name='Code',
-                                  parent=stylesheet['Normal'],
-                                  fontName='Courier',
-                                  fontSize=8,
-                                  leading=8.8,
-                                  firstLineIndent=0,
-                                  leftIndent=36))
-
-
-    return stylesheet
diff --git a/dist-packages/wordaxe/wordaxe/rl/xpreformatted.py b/dist-packages/wordaxe/wordaxe/rl/xpreformatted.py
deleted file mode 100755
index 854fc56e9..000000000
--- a/dist-packages/wordaxe/wordaxe/rl/xpreformatted.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#Copyright ReportLab Europe Ltd. 2000-2004
-#see license.txt for license details
-#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/platypus/xpreformatted.py
-__version__=''' $Id: xpreformatted.py 2426 2004-09-02 11:52:56Z rgbecker $ '''
-
-from reportlab.platypus.xpreformatted import *
-
-_orig_preformatted = XPreformatted
-
-class XPreformatted(_orig_preformatted):
-    def __init__(self, text, style, bulletText = None, frags=None, caseSensitive=1, dedent=0, encoding='utf8'):
-        self.encoding = encoding
-        _orig_preformatted.__init__(self, text, style, bulletText, frags, caseSensitive, dedent)
-
-if __name__=='__main__': #NORUNTESTS
-    def dumpXPreformattedLines(P):
-        print '\n############dumpXPreforemattedLines(%s)' % str(P)
-        lines = P.blPara.lines
-        n =len(lines)
-        for l in range(n):
-            line = lines[l]
-            words = line.words
-            nwords = len(words)
-            print 'line%d: %d(%d)\n ' % (l,nwords,line.wordCount),
-            for w in range(nwords):
-                print "%d:'%s'"%(w,words[w].text),
-            print
-
-    def dumpXPreformattedFrags(P):
-        print '\n############dumpXPreforemattedFrags(%s)' % str(P)
-        frags = P.frags
-        n =len(frags)
-        for l in range(n):
-            print "frag%d: '%s'" % (l, frags[l].text)
-
-        l = 0
-        for L in _getFragLines(frags):
-            n=0
-            for W in _getFragWords(L):
-                print "frag%d.%d: size=%d" % (l, n, W[0]),
-                n = n + 1
-                for w in W[1:]:
-                    print "'%s'" % w[1],
-                print
-            l = l + 1
-
-    def try_it(text,style,dedent,aW,aH):
-        P=XPreformatted(text,style,dedent=dedent)
-        dumpXPreformattedFrags(P)
-        w,h = P.wrap(aW, aH)
-        dumpXPreformattedLines(P)
-        S = P.split(aW,aH)
-        dumpXPreformattedLines(P)
-        for s in S:
-            s.wrap(aW,aH)
-            dumpXPreformattedLines(s)
-    aH = 500
-
-    from wordaxe.rl.styles import getSampleStyleSheet, ParagraphStyle
-    styleSheet = getSampleStyleSheet()
-    B = styleSheet['BodyText']
-    DTstyle = ParagraphStyle("discussiontext", parent=B)
-    DTstyle.fontName= 'Helvetica'
-    for (text,dedent,style, aW, aH, active) in [('''
-
-
-The CMYK or subtractive
-
-method follows the way a printer
-mixes three pigments (cyan, magenta, and yellow) to form colors.
-Because mixing chemicals is more difficult than combining light there
-is a fourth parameter for darkness. For example a chemical
-combination of the CMY pigments generally never makes a perfect
-
-black -- instead producing a muddy color -- so, to get black printers
-don't use the CMY pigments but use a direct black ink. Because
-CMYK maps more directly to the way printer hardware works it may
-be the case that &| & | colors specified in CMYK will provide better fidelity
-and better control when printed.
-
-
-''',0,DTstyle, 456.0, 42.8, 0),
-('''
-
-    This is a non rearranging form of the Paragraph class;
-    XML tags are allowed in text and have the same
-
-    meanings as for the Paragraph class.
-    As for Preformatted, if dedent is non zero dedent
-    common leading spaces will be removed from the
-    front of each line.
-
-''',3, DTstyle, 456.0, 42.8, 0),
-("""\
-    class FastXMLParser:
-        # Nonsense method
-        def nonsense(self):
-            self.foo = 'bar'
-""",0, styleSheet['Code'], 456.0, 4.8, 1),
-]:
-        if active: try_it(text,style,dedent,aW,aH)