commit 3b1fffade1473f20f2558733fbd218f4580fc7c3 Author: Rasmus Andersson Date: Tue Aug 22 00:05:20 2017 -0700 Initial public commit diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..f1917bd5e --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# Use CRLF for line endings in the Windows install "readme" file +misc/doc/install-win.txt text eol=crlf diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..a44425952 --- /dev/null +++ b/.gitignore @@ -0,0 +1,19 @@ +*.pyc +*.pyo +*.ttx +*.o +*.d +*.core +*.obj +*.exe +*.patch +*.diff +_*.ignore +*~ +.DS_Store +*.sparseimage + +build +/_* + +src/FontInspector.html diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 000000000..68c39d5f7 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,92 @@ +Copyright (c) 2017 The Interface Project Authors (me@rsms.me) + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. 
The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION AND CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. 
This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..6ce4dda73 --- /dev/null +++ b/Makefile @@ -0,0 +1,106 @@ +# Targets: +# all Build all styles in all formats (default) +# all_ttf Build all styles as TrueType +# STYLE Build STYLE in all formats (e.g. MediumItalic) +# STYLE_ttf Build STYLE as TrueType (e.g. 
MediumItalic_ttf) +# zip Build all styles as TrueType and package into a zip archive +# +all: all_web all_otf + +# generated.make is automatically generated by init.sh and defines depenencies for +# all styles and alias targets +include build/etc/generated.make + +res_files := src/fontbuild.cfg src/diacritics.txt src/glyphlist.txt src/glyphorder.txt + +# UFO -> TTF & OTF (note that UFO deps are defined by generated.make) +build/tmp/InterfaceTTF/Interface-%.ttf: $(res_files) + misc/ufocompile --otf $* + +build/tmp/InterfaceOTF/Interface-%.otf: build/tmp/InterfaceTTF/Interface-%.ttf $(res_files) + @true + +# build/tmp/ttf -> build (generated.make handles build/tmp/InterfaceTTF/Interface-%.ttf) +build/dist-unhinted/Interface-%.ttf: build/tmp/InterfaceTTF/Interface-%.ttf + @mkdir -p build/dist-unhinted + cp -a "$<" "$@" + +# OTF +build/dist-unhinted/Interface-%.otf: build/tmp/InterfaceOTF/Interface-%.otf + cp -a "$<" "$@" + +build/dist: + @mkdir -p build/dist + +# autohint +build/dist/Interface-%.ttf: build/dist-unhinted/Interface-%.ttf build/dist + ttfautohint \ + --hinting-limit=256 \ + --hinting-range-min=8 \ + --hinting-range-max=64 \ + --fallback-stem-width=256 \ + --strong-stem-width=D \ + --no-info \ + --verbose \ + "$<" "$@" + +# TTF -> WOFF2 +build/%.woff2: build/%.ttf + woff2_compress "$<" + +# TTF -> WOFF +build/%.woff: build/%.ttf + ttf2woff -O -t woff "$<" "$@" + +# TTF -> EOT (disabled) +# build/%.eot: build/%.ttf +# ttf2eot "$<" > "$@" + +# TTF -> zip +zip: all + @rm -rf build/.zip + @rm -f build/.zip.zip + @mkdir -p \ + "build/.zip/Interface (web)" \ + "build/.zip/Interface (hinted TTF)" \ + "build/.zip/Interface (TTF)" \ + "build/.zip/Interface (OTF)" + cp -a build/dist/*.woff build/dist/*.woff2 "build/.zip/Interface (web)/" + cp -a build/dist/*.ttf "build/.zip/Interface (hinted TTF)/" + cp -a build/dist-unhinted/*.ttf "build/.zip/Interface (TTF)/" + cp -a build/dist-unhinted/*.otf "build/.zip/Interface (OTF)/" + cp -a misc/doc/install-*.txt 
"build/.zip/" + cd build/.zip && zip -v -X -r "../../build/.zip.zip" * + @mkdir -p build/release + @mv -f build/.zip.zip build/release/Interface-`date '+%Y%m%d'`.zip + @echo write build/release/Interface-`date '+%Y%m%d'`.zip + @rm -rf build/.zip + +install_ttf: all_ttf + @echo "Installing TTF files locally at ~/Library/Fonts/Interface" + rm -rf ~/Library/Fonts/Interface + mkdir -p ~/Library/Fonts/Interface + cp -va build/dist/*.ttf ~/Library/Fonts/Interface + +install_otf: all_otf + @echo "Installing OTF files locally at ~/Library/Fonts/Interface" + rm -rf ~/Library/Fonts/Interface + mkdir -p ~/Library/Fonts/Interface + cp -va build/dist-unhinted/*.otf ~/Library/Fonts/Interface + +install: all install_otf + +glyphinfo: _local/UnicodeData.txt + misc/gen-glyphinfo.py -ucd _local/UnicodeData.txt \ + src/Interface-*.ufo > misc/preview/glyphinfo.json + +# Download latest Unicode data +_local/UnicodeData.txt: + @mkdir -p _local + curl -s '-#' -o "$@" \ + http://www.unicode.org/Public/UCD/latest/ucd/UnicodeData.txt + +clean: + rm -vrf build/tmp/* build/dist/Interface-*.* + +.PHONY: all web clean install install_otf install_ttf deploy zip glyphinfo diff --git a/README.md b/README.md new file mode 100644 index 000000000..8654874bb --- /dev/null +++ b/README.md @@ -0,0 +1,207 @@ +# Interface + +Interface is a typeface specially designed for user interfaces, with excellent ligibility at small sizes. + +![Sample](docs/res/sample.png) + +### [⬇︎ Download the latest release](https://github.com/rsms/interface/releases) + +After downloading the zip from above: + +1. Double-click the downloaded zip file to unpack or open it. +2. Follow the instructions in "install-mac.txt" or "install-win.txt", depending + on what operating system you're using. + + +## Design + +Interface is similar to Roboto, San Francisco, Akkurat, Asap, Lucida Grande and other "UI" typefaces. 
Some trade-offs were made in order to make this typeface work really well at small sizes: + +- Currently not suitable for very large sizes because of some small-scale glyph optimizations (like "pits" and "traps") that help rasterization at small sizes but stand out and interfere at large sizes. +- Rasterized at sizes below 12px, some stems—like the horizontal center of "E", "F", or vertical center of "m"—are drawn with two semi-opaque pixels instead of one solid. This is because we "prioritize" (optimize for) higher-denisty rasterizations. If we move these stems to an off-center position—so that they can be drawn sharply at e.g. 11px—text will be less legible at higher resolutions. + +Current font styles: + +- Regular — master + - Italic +- Bold — master + - BoldItalic +- Medium — derived from Regular and Bold by mixing + - MediumItalic +- Black — derived from Regular and Bold by mixing + - BlackItalic + +Future versions will hopefully include lighter weights. + + +### Font metrics + +This font was originally designed to work at a specific size: 11px. Thus, the Units per [EM](https://en.wikipedia.org/wiki/Em_(typography)) (UPM) is defined in such a way that a power-of-two multiple of one EM unit ends up at an integer value compared to a pixel. Most fonts are designed with a UPM of either 1000 or 2048. Because of this we picked a value that is as high as possible but also as close as possible to one of those common values (since it's reasonable to assume that some layout engines and rasterizers are optimized for those value magnitudes.) We ended up picking a UPM of 2816 which equates to exactly 256 units per pixel when rasterized for size 11pt at 1x scale. This also means that when rasterized at power-of-two scales (like 2x and 4x) the number of EM units corresponding to a pixel is an integer (128 units for 2x, 64 for 4x, and so on.) 
+ +However, as the project progressed and the typeface was put into use, it quickly +bacame clear that for anything longer than a short word, it was actually hard to +read the almost monotonically-spaced letters. + +A second major revision was create where the previously-strict rule of geometry being even multiples of 256 was relaxed and now the rule is "try to stick with 128x, if you can't, stick with 64x and if you can't do that either, never go below 16x." This means that Interface is now much more variable in pace than it used to be, making it work better at higher resolutions and work much better in longer text, but losing some contrast and sharpness at small sizes. + +![Metrics](docs/res/metrics.png) + +The glyphs are designed based on this "plan"; most stems and lines will be positioned at EM units that are even multiples of 128, and in a few cases they are at even multiples of 64 or as low as 16. + +Metrics: + +- UPM: 2816 +- Descender: -640 +- x-height: 1536 +- Cap height: 2048 +- Ascender: 2688 + +Translating between EM units and pixels: + +- Rasterized at 11px: 1px = 256 units +- Rasterized at 22px: 1px = 128 units +- Rasterized at 44px: 1px = 64 units + +There's a Figma workspace for glyphs, with configured metrics: ["Interface glyphs"](https://www.figma.com/file/RtScFU5NETY3j9E0yOmnW4gv/Interface-glyphs) + + +## Contributing + +By contributing work to the Interface font project you agree to have all work +contributed becoming the intellectual property of the Interface font project as +described by [SIL Open Font License, Version 1.1](http://scripts.sil.org/OFL) + +### Building + +Prerequisites: + +- Python 2.7 with pip (you get pip with `brew install python`) +- [virtualenv](https://virtualenv.pypa.io/) + +``` +$ ./init.sh +``` + +This will generate makefile support, dependencies required by the toolchain, etc. +At the end, the script prints instructions for how to activate `virtualenv`. 
+As a convenience, you can also source init.sh to activate virtualenv. + +We can now run `make` to build all font files: + +``` +$ make +``` + +Or just specific styles: + +``` +$ make Regular BoldItalic +``` + +Or all fonts but only TrueType format (no web file formats): + +``` +$ make all_ttf +``` + +Or just specific styles and formats: + +``` +# Regular in all formats, BoldItalic in only TrueType format +$ make Regular BoldItalic_ttf +``` + +You can also specify specific style + file format to `make` through `build/Interface-STYLE.FORMAT`. +E.g. + +- `make build/Interface-MediumItalic.eot` +- `make build/Interface-Bold.woff2` +- `make build/Interface-Regular.ttf` +- `...` + +All resulting font files are written to the `build` directory with `Interface-` as the filename prefix. + +Note: Making all files takes a considerable amount of time. +It's a CPU and I/O intensive task to compile the fonts and so the build system has been setup to +be able to run many jobs in parallel. Therefore it's recommended to pass the [`-j` flag to make](https://www.gnu.org/software/make/manual/html_node/Parallel.html) and +optionally pipe the fairly verbose output to /dev/null, e.g. `make -j 8 >/dev/null`. + + +### Editing + +This font is stored and authored in the [Unified Font Object (UFO)](http://unifiedfontobject.org/) file format and can be edited by many different software, some free. However, it's only been "tested" with [RoboFont](http://robofont.com/) which is a popular commercial font editor. There's a 30 day fully-functional free trial version of the app, so you can use it for smaller contributions without needing to buy a RoboFont license. + +To make life easier for you, configure RoboFont's settings like this: + +- Set the grid to 128 units. This means that each grid square equals one pixel at 2x scale. +- Set "Snap points to" to a reasonably high number that's a power-of-two, like 8. 
+- Set "SHIFT increment" to 16 +- Set "CMD SHIFT increment" to 128 + +When you've made an edit, simply save your changes and run make: + +``` +$ make +``` + +*For quick turnaround, consider:* + +- Build and test only the "Regular" style. +- Use `misc/notify` to get desktop notifications on builds so that you don't have to sit and wait looking at the terminal while it's building. + +E.g. `misc/notify make Regular` + +See ["Building"](#Building) for more details. + + +### Preview & debug + +This project comes with a simple web-based application for debugging and +previewing the font. It's a very useful tool to have when working on the font. + +- Comes with a large body of sample text data (which is also editable.) +- Provides samples of the most common latin-script pairs, useful for kerning. +- Provides samples of words ordered by commonality in latin scripts with a + preference for English (accessible via common-pair samples.) +- Can show the complete repertoire of the fonts, with correct glyph order and + even RoboFont color labels ("marks"). +- Controls for basic font properties like family, weight, italic, size, + line-height, letter-spacing, etc. +- Controls for a lot of font features like ligature sets, contextual alternates, + alternate numerics, etc. +- Controls for web-browser text features like `captialize`, `uppercase`, + `lowercase`, etc. +- Ability to compare Interface side-by-side with other fonts. + +![Preview app screenshot](docs/res/preview-app.png) + +The following will start a local web server (which is only accessable from your local computer; not the internet) that serves the debug-and-preview app: + +``` +$ docs/serve.sh & +``` + +You can now visit `http://localhost:2015/lab/`. +After you rebuild some font files, reload the web page to refresh fonts. + + +## FAQ + +> Do I need RoboFont? + +No, you don't. To build font files, all you need is Python. 
To edit the font files, you need something that can edit UFO files (like [RoboFont](http://robofont.com/) or a text editor.) + + +> `KeyError: 'Lj'` when building + +This probably means that you need to run `./init.sh` to setup the case-sensitive virtual file system mount that is needed by the font build system. Unfortunately the toolchain used (which is the same as for Roboto) requires not only a case-preserving file system, but also a case-sensitive one. + + +> `ImportError: No module named robofab.objects.objectsRF` + +Python virtualenv not configured. Run `. init.sh` + + +> `make: *** No rule to make target ...` + +Run `./init.sh` to update the generated makefile. diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 000000000..916fe231f --- /dev/null +++ b/docs/README.md @@ -0,0 +1 @@ +This directory is published as a website by Github at [https://rsms.me/interface](https://rsms.me/interface/) diff --git a/docs/favicon.ico b/docs/favicon.ico new file mode 100644 index 000000000..7a0223b01 Binary files /dev/null and b/docs/favicon.ico differ diff --git a/docs/font-files/Interface-Bold.woff b/docs/font-files/Interface-Bold.woff new file mode 100644 index 000000000..64f47f6a9 Binary files /dev/null and b/docs/font-files/Interface-Bold.woff differ diff --git a/docs/font-files/Interface-Bold.woff2 b/docs/font-files/Interface-Bold.woff2 new file mode 100644 index 000000000..d070e4215 Binary files /dev/null and b/docs/font-files/Interface-Bold.woff2 differ diff --git a/docs/font-files/Interface-BoldItalic.woff b/docs/font-files/Interface-BoldItalic.woff new file mode 100644 index 000000000..48b0cd166 Binary files /dev/null and b/docs/font-files/Interface-BoldItalic.woff differ diff --git a/docs/font-files/Interface-BoldItalic.woff2 b/docs/font-files/Interface-BoldItalic.woff2 new file mode 100644 index 000000000..31b63b75b Binary files /dev/null and b/docs/font-files/Interface-BoldItalic.woff2 differ diff --git 
a/docs/font-files/Interface-Medium.woff b/docs/font-files/Interface-Medium.woff new file mode 100644 index 000000000..358048438 Binary files /dev/null and b/docs/font-files/Interface-Medium.woff differ diff --git a/docs/font-files/Interface-Medium.woff2 b/docs/font-files/Interface-Medium.woff2 new file mode 100644 index 000000000..e89d86d0c Binary files /dev/null and b/docs/font-files/Interface-Medium.woff2 differ diff --git a/docs/font-files/Interface-MediumItalic.woff b/docs/font-files/Interface-MediumItalic.woff new file mode 100644 index 000000000..86dbff7cf Binary files /dev/null and b/docs/font-files/Interface-MediumItalic.woff differ diff --git a/docs/font-files/Interface-MediumItalic.woff2 b/docs/font-files/Interface-MediumItalic.woff2 new file mode 100644 index 000000000..dfbbf688d Binary files /dev/null and b/docs/font-files/Interface-MediumItalic.woff2 differ diff --git a/docs/font-files/Interface-Regular.woff b/docs/font-files/Interface-Regular.woff new file mode 100644 index 000000000..bfcb732c1 Binary files /dev/null and b/docs/font-files/Interface-Regular.woff differ diff --git a/docs/font-files/Interface-Regular.woff2 b/docs/font-files/Interface-Regular.woff2 new file mode 100644 index 000000000..a2dc05cad Binary files /dev/null and b/docs/font-files/Interface-Regular.woff2 differ diff --git a/docs/font-files/Interface-RegularItalic.woff b/docs/font-files/Interface-RegularItalic.woff new file mode 100644 index 000000000..dcccd8f8d Binary files /dev/null and b/docs/font-files/Interface-RegularItalic.woff differ diff --git a/docs/font-files/Interface-RegularItalic.woff2 b/docs/font-files/Interface-RegularItalic.woff2 new file mode 100644 index 000000000..b9d08d3f9 Binary files /dev/null and b/docs/font-files/Interface-RegularItalic.woff2 differ diff --git a/docs/index.html b/docs/index.html new file mode 100644 index 000000000..f247422c2 --- /dev/null +++ b/docs/index.html @@ -0,0 +1,237 @@ + + + + + Interface font family + + + + + + + + + + + + + + + 
+ + + +
+

The Interface font family

+

+ Interface is a font for highly legible text on computer screens.
+ Download the latest release + or try it out in the playground +

+
+ +
+

Sample

+

+ +

+
+ +
+

How much does it cost?

+

+ Interface is a free and open source font family. You are free to use this font in almost any way imaginable. + Refer to the SIL Open Font License 1.1 for exact details on what the conditions and restrictions are. +

+ +

 

+ +

How do I use it?

+

+ Using the font is as easy as + download & installing locally on your computer. +

+

+ You're free to bundle copies of Interface with your software, even if it's + commercial and you charge money for your software. Interface can also be used + on the web by either hosting the font files yourself or by including this CSS: +

+ @import url('https://rsms.me/interface/interface.css'); +

Use the following CSS rules to specify these families:

+ font-family: Interface, sans-serif; +
+ +
+

The story behind Interface

+

+ Interface started out in late 2016 as an experiment to build a perfectly + pixel-fitting font at a specific small size (11px.) The idea was that + by crafting a font in a particular way, with a particular coordinate system + (Units Per EM), and for a particular target rasterization size (11), it would + be possible to get the best of both sharpness and readability. +

+

+ However after a few months of using an early version of Interface, it dawned + on everyone exposed to the test that this approach had some serious real–world + problems. Most notably that it was really hard to read longer text. Because of + the pixel–aligning nature of that approach, the font took an almost monospaced + appearance, making it really easy to read numbers, punctuation and very short + words, but eye–straining to read anything longer. +

+

+ The project was rebooted with a different approach, sticking with the + raster–spcific UPM, but crafting glyphs and kerning in a way that made for + more variation in the rythm and smoother vertical and horizontal stems. + As Interface was being developed, it was tested on an internal version of + Figma—where the author of Interface works as a designer—and slowly imporved upon based on experience and feedback. +

+ +

 

+ +

Current status & usability

+

+ Interface works great for English–language text, and pretty well for other + latin and cyrillic languages. There's still a lot of work to be done, and + contributions are warmly welcomed. + Please refer to the glyph repertoire + for an overview of currently–available glyphs and their quality. + You can also look at some common non English–language words in the playground. +

+
+ + +
+ — @rsms +
+ + + diff --git a/docs/interface.css b/docs/interface.css new file mode 100644 index 000000000..1b80000e8 --- /dev/null +++ b/docs/interface.css @@ -0,0 +1,44 @@ +@font-face { + font-family: 'Interface'; + font-style: normal; + font-weight: 400; + src: url("https://rsms.me/interface/font-files/Interface-Regular.woff2") format("woff2"), + url("https://rsms.me/interface/font-files/Interface-Regular.woff") format("woff"); +} +@font-face { + font-family: 'Interface'; + font-style: italic; + font-weight: 400; + src: url("https://rsms.me/interface/font-files/Interface-RegularItalic.woff2") format("woff2"), + url("https://rsms.me/interface/font-files/Interface-RegularItalic.woff") format("woff"); +} + +@font-face { + font-family: 'Interface'; + font-style: normal; + font-weight: 500; + src: url("https://rsms.me/interface/font-files/Interface-Medium.woff2") format("woff2"), + url("https://rsms.me/interface/font-files/Interface-Medium.woff") format("woff"); +} +@font-face { + font-family: 'Interface'; + font-style: italic; + font-weight: 500; + src: url("https://rsms.me/interface/font-files/Interface-MediumItalic.woff2") format("woff2"), + url("https://rsms.me/interface/font-files/Interface-MediumItalic.woff") format("woff"); +} + +@font-face { + font-family: 'Interface'; + font-style: normal; + font-weight: 700; + src: url("https://rsms.me/interface/font-files/Interface-Bold.woff2") format("woff2"), + url("https://rsms.me/interface/font-files/Interface-Bold.woff") format("woff"); +} +@font-face { + font-family: 'Interface'; + font-style: italic; + font-weight: 700; + src: url("https://rsms.me/interface/font-files/Interface-BoldItalic.woff2") format("woff2"), + url("https://rsms.me/interface/font-files/Interface-BoldItalic.woff") format("woff"); +} diff --git a/docs/lab/fonts b/docs/lab/fonts new file mode 120000 index 000000000..554d400e1 --- /dev/null +++ b/docs/lab/fonts @@ -0,0 +1 @@ +../../build/dist \ No newline at end of file diff --git a/docs/lab/glyphinfo.json 
b/docs/lab/glyphinfo.json new file mode 100644 index 000000000..2495aa1cf --- /dev/null +++ b/docs/lab/glyphinfo.json @@ -0,0 +1,3075 @@ +{"glyphs":[ + [".null", 0, "[Control]", "#ff4c4c"] +, ["space", 32, "SPACE", "#ede5f9"] +, ["A", 65, "LATIN CAPITAL LETTER A", "#dbeaf7"] +, ["Lambda", 923, "GREEK CAPITAL LETTER LAMDA"] +, ["B", 66, "LATIN CAPITAL LETTER B", "#dbeaf7"] +, ["C", 67, "LATIN CAPITAL LETTER C", "#dbeaf7"] +, ["D", 68, "LATIN CAPITAL LETTER D", "#dbeaf7"] +, ["E", 69, "LATIN CAPITAL LETTER E", "#dbeaf7"] +, ["F", 70, "LATIN CAPITAL LETTER F", "#dbeaf7"] +, ["G", 71, "LATIN CAPITAL LETTER G", "#dbeaf7"] +, ["H", 72, "LATIN CAPITAL LETTER H", "#dbeaf7"] +, ["Hcedilla", 7720, "LATIN CAPITAL LETTER H WITH CEDILLA"] +, ["Hbar", 294, "LATIN CAPITAL LETTER H WITH STROKE"] +, ["Enhookcyrillic", 1223, "CYRILLIC CAPITAL LETTER EN WITH HOOK"] +, ["Endescendercyrillic", 1186, "CYRILLIC CAPITAL LETTER EN WITH DESCENDER"] +, ["uni04C9", 1225, "CYRILLIC CAPITAL LETTER EN WITH TAIL"] +, ["Enghecyrillic", 1188, "CYRILLIC CAPITAL LIGATURE EN GHE"] +, ["I", 73, "LATIN CAPITAL LETTER I", "#dbeaf7"] +, ["Idieresisacute", 7726, "LATIN CAPITAL LETTER I WITH DIAERESIS AND ACUTE", ""] +, ["Itildebelow", 7724, "LATIN CAPITAL LETTER I WITH TILDE BELOW", ""] +, ["J", 74, "LATIN CAPITAL LETTER J", "#dbeaf7"] +, ["K", 75, "LATIN CAPITAL LETTER K", "#dbeaf7"] +, ["Kabashkircyrillic", 1184, "CYRILLIC CAPITAL LETTER BASHKIR KA"] +, ["Kadescendercyrillic", 1178, "CYRILLIC CAPITAL LETTER KA WITH DESCENDER"] +, ["L", 76, "LATIN CAPITAL LETTER L", "#dbeaf7"] +, ["Lslash", 321, "LATIN CAPITAL LETTER L WITH STROKE"] +, ["M", 77, "LATIN CAPITAL LETTER M", "#dbeaf7"] +, ["uni04CD", 1229, "CYRILLIC CAPITAL LETTER EM WITH TAIL"] +, ["N", 78, "LATIN CAPITAL LETTER N", "#dbeaf7"] +, ["afii10026", 1048, "CYRILLIC CAPITAL LETTER I"] +, ["uni048A", 1162, "CYRILLIC CAPITAL LETTER SHORT I WITH TAIL"] +, ["O", 79, "LATIN CAPITAL LETTER O", "#dbeaf7"] +, ["Oslash", 216, "LATIN CAPITAL LETTER O WITH 
STROKE"] +, ["Ohorn", 416, "LATIN CAPITAL LETTER O WITH HORN"] +, ["P", 80, "LATIN CAPITAL LETTER P", "#dbeaf7"] +, ["Q", 81, "LATIN CAPITAL LETTER Q", "#dbeaf7"] +, ["R", 82, "LATIN CAPITAL LETTER R", "#dbeaf7"] +, ["response", 8479, "RESPONSE", "#cceff2"] +, ["S", 83, "LATIN CAPITAL LETTER S", "#dbeaf7"] +, ["Germandbls", 7838, "LATIN CAPITAL LETTER SHARP S"] +, ["T", 84, "LATIN CAPITAL LETTER T", "#dbeaf7"] +, ["U", 85, "LATIN CAPITAL LETTER U", "#dbeaf7"] +, ["Uhorn", 431, "LATIN CAPITAL LETTER U WITH HORN"] +, ["V", 86, "LATIN CAPITAL LETTER V", "#dbeaf7"] +, ["W", 87, "LATIN CAPITAL LETTER W", "#dbeaf7"] +, ["X", 88, "LATIN CAPITAL LETTER X", "#dbeaf7"] +, ["Hadescendercyrillic", 1202, "CYRILLIC CAPITAL LETTER HA WITH DESCENDER"] +, ["uni04FC", 1276, "CYRILLIC CAPITAL LETTER HA WITH HOOK"] +, ["Y", 89, "LATIN CAPITAL LETTER Y", "#dbeaf7"] +, ["Z", 90, "LATIN CAPITAL LETTER Z", "#dbeaf7"] +, ["AE", 198, "LATIN CAPITAL LETTER AE"] +, ["OE", 338, "LATIN CAPITAL LIGATURE OE"] +, ["Thorn", 222, "LATIN CAPITAL LETTER THORN"] +, ["Gamma", 915, "GREEK CAPITAL LETTER GAMMA"] +, ["Delta", 916, "GREEK CAPITAL LETTER DELTA"] +, ["Delta", 8710, "INCREMENT"] +, ["Theta", 920, "GREEK CAPITAL LETTER THETA"] +, ["Xi", 926, "GREEK CAPITAL LETTER XI"] +, ["Pi", 928, "GREEK CAPITAL LETTER PI"] +, ["Sigma", 931, "GREEK CAPITAL LETTER SIGMA"] +, ["Phi", 934, "GREEK CAPITAL LETTER PHI"] +, ["Psi", 936, "GREEK CAPITAL LETTER PSI"] +, ["Omega", 937, "GREEK CAPITAL LETTER OMEGA"] +, ["Omega", 8486, "OHM SIGN"] +, ["afii10018", 1041, "CYRILLIC CAPITAL LETTER BE"] +, ["afii10050", 1168, "CYRILLIC CAPITAL LETTER GHE WITH UPTURN"] +, ["uni04FA", 1274, "CYRILLIC CAPITAL LETTER GHE WITH STROKE AND HOOK"] +, ["afii10021", 1044, "CYRILLIC CAPITAL LETTER DE"] +, ["afii10024", 1046, "CYRILLIC CAPITAL LETTER ZHE"] +, ["Zhedescendercyrillic", 1174, "CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER"] +, ["afii10025", 1047, "CYRILLIC CAPITAL LETTER ZE"] +, ["Kaverticalstrokecyrillic", 1180, "CYRILLIC 
CAPITAL LETTER KA WITH VERTICAL STROKE"] +, ["Kahookcyrillic", 1219, "CYRILLIC CAPITAL LETTER KA WITH HOOK"] +, ["afii10029", 1051, "CYRILLIC CAPITAL LETTER EL"] +, ["uni04C5", 1221, "CYRILLIC CAPITAL LETTER EL WITH TAIL"] +, ["uni052E", 1326, "CYRILLIC CAPITAL LETTER EL WITH DESCENDER"] +, ["uni0512", 1298, "CYRILLIC CAPITAL LETTER EL WITH HOOK"] +, ["afii10147", 1138, "CYRILLIC CAPITAL LETTER FITA"] +, ["uni0524", 1316, "CYRILLIC CAPITAL LETTER PE WITH DESCENDER"] +, ["afii10145", 1039, "CYRILLIC CAPITAL LETTER DZHE"] +, ["uni048E", 1166, "CYRILLIC CAPITAL LETTER ER WITH TICK"] +, ["afii10037", 1059, "CYRILLIC CAPITAL LETTER U"] +, ["afii10038", 1060, "CYRILLIC CAPITAL LETTER EF", ""] +, ["afii10040", 1062, "CYRILLIC CAPITAL LETTER TSE"] +, ["Tetsecyrillic", 1204, "CYRILLIC CAPITAL LIGATURE TE TSE"] +, ["afii10041", 1063, "CYRILLIC CAPITAL LETTER CHE"] +, ["Chedescendercyrillic", 1206, "CYRILLIC CAPITAL LETTER CHE WITH DESCENDER"] +, ["Cheverticalstrokecyrillic", 1208, "CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE"] +, ["Shhacyrillic", 1210, "CYRILLIC CAPITAL LETTER SHHA"] +, ["uni0526", 1318, "CYRILLIC CAPITAL LETTER SHHA WITH DESCENDER"] +, ["afii10060", 1035, "CYRILLIC CAPITAL LETTER TSHE"] +, ["afii10042", 1064, "CYRILLIC CAPITAL LETTER SHA"] +, ["afii10043", 1065, "CYRILLIC CAPITAL LETTER SHCHA"] +, ["afii10044", 1066, "CYRILLIC CAPITAL LETTER HARD SIGN"] +, ["afii10045", 1067, "CYRILLIC CAPITAL LETTER YERU"] +, ["afii10046", 1068, "CYRILLIC CAPITAL LETTER SOFT SIGN"] +, ["afii10058", 1033, "CYRILLIC CAPITAL LETTER LJE"] +, ["afii10059", 1034, "CYRILLIC CAPITAL LETTER NJE"] +, ["afii10047", 1069, "CYRILLIC CAPITAL LETTER E"] +, ["afii10048", 1070, "CYRILLIC CAPITAL LETTER YU"] +, ["afii10049", 1071, "CYRILLIC CAPITAL LETTER YA"] +, ["Ghemiddlehookcyrillic", 1172, "CYRILLIC CAPITAL LETTER GHE WITH MIDDLE HOOK"] +, ["Pemiddlehookcyrillic", 1190, "CYRILLIC CAPITAL LETTER PE WITH MIDDLE HOOK"] +, ["afii10053", 1028, "CYRILLIC CAPITAL LETTER UKRAINIAN IE"] +, 
["Haabkhasiancyrillic", 1192, "CYRILLIC CAPITAL LETTER ABKHASIAN HA"] +, ["Cheabkhasiancyrillic", 1212, "CYRILLIC CAPITAL LETTER ABKHASIAN CHE"] +, ["Schwacyrillic", 1240, "CYRILLIC CAPITAL LETTER SCHWA"] +, ["Dzeabkhasiancyrillic", 1248, "CYRILLIC CAPITAL LETTER ABKHASIAN DZE"] +, ["uni0510", 1296, "CYRILLIC CAPITAL LETTER REVERSED ZE"] +, ["afii10148", 1140, "CYRILLIC CAPITAL LETTER IZHITSA"] +, ["afii10051", 1026, "CYRILLIC CAPITAL LETTER DJE"] +, ["a", 97, "LATIN SMALL LETTER A", "#dbeaf7"] +, ["b", 98, "LATIN SMALL LETTER B", "#dbeaf7"] +, ["c", 99, "LATIN SMALL LETTER C", "#dbeaf7"] +, ["d", 100, "LATIN SMALL LETTER D", "#dbeaf7"] +, ["dcroat", 273, "LATIN SMALL LETTER D WITH STROKE"] +, ["e", 101, "LATIN SMALL LETTER E", "#dbeaf7"] +, ["f", 102, "LATIN SMALL LETTER F", "#dbeaf7"] +, ["g", 103, "LATIN SMALL LETTER G", "#dbeaf7"] +, ["h", 104, "LATIN SMALL LETTER H", "#dbeaf7"] +, ["hcedilla", 7721, "LATIN SMALL LETTER H WITH CEDILLA"] +, ["i", 105, "LATIN SMALL LETTER I", "#dbeaf7"] +, ["idieresisacute", 7727, "LATIN SMALL LETTER I WITH DIAERESIS AND ACUTE", ""] +, ["itildebelow", 7725, "LATIN SMALL LETTER I WITH TILDE BELOW", ""] +, ["j", 106, "LATIN SMALL LETTER J", "#dbeaf7"] +, ["k", 107, "LATIN SMALL LETTER K", "#dbeaf7"] +, ["l", 108, "LATIN SMALL LETTER L", "#dbeaf7"] +, ["lslash", 322, "LATIN SMALL LETTER L WITH STROKE"] +, ["m", 109, "LATIN SMALL LETTER M", "#dbeaf7"] +, ["n", 110, "LATIN SMALL LETTER N", "#dbeaf7"] +, ["o", 111, "LATIN SMALL LETTER O", "#dbeaf7"] +, ["oslash", 248, "LATIN SMALL LETTER O WITH STROKE"] +, ["ohorn", 417, "LATIN SMALL LETTER O WITH HORN"] +, ["p", 112, "LATIN SMALL LETTER P", "#dbeaf7"] +, ["q", 113, "LATIN SMALL LETTER Q", "#dbeaf7"] +, ["r", 114, "LATIN SMALL LETTER R", "#dbeaf7"] +, ["s", 115, "LATIN SMALL LETTER S", "#dbeaf7"] +, ["t", 116, "LATIN SMALL LETTER T", "#dbeaf7"] +, ["u", 117, "LATIN SMALL LETTER U", "#dbeaf7"] +, ["uhorn", 432, "LATIN SMALL LETTER U WITH HORN"] +, ["v", 118, "LATIN SMALL LETTER V", 
"#dbeaf7"] +, ["w", 119, "LATIN SMALL LETTER W", "#dbeaf7"] +, ["x", 120, "LATIN SMALL LETTER X", "#dbeaf7"] +, ["y", 121, "LATIN SMALL LETTER Y", "#dbeaf7"] +, ["z", 122, "LATIN SMALL LETTER Z", "#dbeaf7"] +, ["ae", 230, "LATIN SMALL LETTER AE"] +, ["oe", 339, "LATIN SMALL LIGATURE OE"] +, ["eth", 240, "LATIN SMALL LETTER ETH"] +, ["thorn", 254, "LATIN SMALL LETTER THORN"] +, ["alpha", 945, "GREEK SMALL LETTER ALPHA"] +, ["beta", 946, "GREEK SMALL LETTER BETA"] +, ["gamma", 947, "GREEK SMALL LETTER GAMMA"] +, ["delta", 948, "GREEK SMALL LETTER DELTA"] +, ["epsilon", 949, "GREEK SMALL LETTER EPSILON"] +, ["zeta", 950, "GREEK SMALL LETTER ZETA"] +, ["eta", 951, "GREEK SMALL LETTER ETA"] +, ["theta", 952, "GREEK SMALL LETTER THETA"] +, ["iota", 953, "GREEK SMALL LETTER IOTA"] +, ["lambda", 955, "GREEK SMALL LETTER LAMDA"] +, ["xi", 958, "GREEK SMALL LETTER XI"] +, ["pi", 960, "GREEK SMALL LETTER PI"] +, ["rho", 961, "GREEK SMALL LETTER RHO"] +, ["sigma1", 962, "GREEK SMALL LETTER FINAL SIGMA"] +, ["sigma", 963, "GREEK SMALL LETTER SIGMA"] +, ["tau", 964, "GREEK SMALL LETTER TAU"] +, ["upsilon", 965, "GREEK SMALL LETTER UPSILON"] +, ["phi", 966, "GREEK SMALL LETTER PHI"] +, ["psi", 968, "GREEK SMALL LETTER PSI"] +, ["omega", 969, "GREEK SMALL LETTER OMEGA"] +, ["tonos", 900, "GREEK TONOS", "#cceff2"] +, ["becyrillic", 1073, "CYRILLIC SMALL LETTER BE"] +, ["vecyrillic", 1074, "CYRILLIC SMALL LETTER VE"] +, ["gecyrillic", 1075, "CYRILLIC SMALL LETTER GHE"] +, ["gheupturncyrillic", 1169, "CYRILLIC SMALL LETTER GHE WITH UPTURN"] +, ["uni04FB", 1275, "CYRILLIC SMALL LETTER GHE WITH STROKE AND HOOK"] +, ["decyrillic", 1076, "CYRILLIC SMALL LETTER DE"] +, ["zhecyrillic", 1078, "CYRILLIC SMALL LETTER ZHE"] +, ["zhedescendercyrillic", 1175, "CYRILLIC SMALL LETTER ZHE WITH DESCENDER"] +, ["zecyrillic", 1079, "CYRILLIC SMALL LETTER ZE"] +, ["iicyrillic", 1080, "CYRILLIC SMALL LETTER I"] +, ["uni048B", 1163, "CYRILLIC SMALL LETTER SHORT I WITH TAIL"] +, ["kacyrillic", 1082, 
"CYRILLIC SMALL LETTER KA"] +, ["kahookcyrillic", 1220, "CYRILLIC SMALL LETTER KA WITH HOOK"] +, ["kadescendercyrillic", 1179, "CYRILLIC SMALL LETTER KA WITH DESCENDER"] +, ["kaverticalstrokecyrillic", 1181, "CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE"] +, ["kabashkircyrillic", 1185, "CYRILLIC SMALL LETTER BASHKIR KA"] +, ["elcyrillic", 1083, "CYRILLIC SMALL LETTER EL"] +, ["uni04C6", 1222, "CYRILLIC SMALL LETTER EL WITH TAIL"] +, ["uni0513", 1299, "CYRILLIC SMALL LETTER EL WITH HOOK"] +, ["uni052F", 1327, "CYRILLIC SMALL LETTER EL WITH DESCENDER"] +, ["emcyrillic", 1084, "CYRILLIC SMALL LETTER EM"] +, ["uni04CE", 1230, "CYRILLIC SMALL LETTER EM WITH TAIL"] +, ["encyrillic", 1085, "CYRILLIC SMALL LETTER EN"] +, ["endescendercyrillic", 1187, "CYRILLIC SMALL LETTER EN WITH DESCENDER"] +, ["enghecyrillic", 1189, "CYRILLIC SMALL LIGATURE EN GHE"] +, ["enhookcyrillic", 1224, "CYRILLIC SMALL LETTER EN WITH HOOK"] +, ["uni04CA", 1226, "CYRILLIC SMALL LETTER EN WITH TAIL"] +, ["fitacyrillic", 1139, "CYRILLIC SMALL LETTER FITA"] +, ["pecyrillic", 1087, "CYRILLIC SMALL LETTER PE"] +, ["uni0525", 1317, "CYRILLIC SMALL LETTER PE WITH DESCENDER"] +, ["uni048F", 1167, "CYRILLIC SMALL LETTER ER WITH TICK"] +, ["tecyrillic", 1090, "CYRILLIC SMALL LETTER TE"] +, ["efcyrillic", 1092, "CYRILLIC SMALL LETTER EF"] +, ["uni04FD", 1277, "CYRILLIC SMALL LETTER HA WITH HOOK"] +, ["hadescendercyrillic", 1203, "CYRILLIC SMALL LETTER HA WITH DESCENDER"] +, ["tsecyrillic", 1094, "CYRILLIC SMALL LETTER TSE"] +, ["dzhecyrillic", 1119, "CYRILLIC SMALL LETTER DZHE"] +, ["checyrillic", 1095, "CYRILLIC SMALL LETTER CHE"] +, ["chedescendercyrillic", 1207, "CYRILLIC SMALL LETTER CHE WITH DESCENDER"] +, ["cheverticalstrokecyrillic", 1209, "CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE"] +, ["shacyrillic", 1096, "CYRILLIC SMALL LETTER SHA"] +, ["shchacyrillic", 1097, "CYRILLIC SMALL LETTER SHCHA"] +, ["softsigncyrillic", 1100, "CYRILLIC SMALL LETTER SOFT SIGN"] +, ["hardsigncyrillic", 1098, 
"CYRILLIC SMALL LETTER HARD SIGN"] +, ["yericyrillic", 1099, "CYRILLIC SMALL LETTER YERU"] +, ["uni048D", 1165, "CYRILLIC SMALL LETTER SEMISOFT SIGN", ""] +, ["yatcyrillic", 1123, "CYRILLIC SMALL LETTER YAT", ""] +, ["ljecyrillic", 1113, "CYRILLIC SMALL LETTER LJE"] +, ["njecyrillic", 1114, "CYRILLIC SMALL LETTER NJE"] +, ["ereversedcyrillic", 1101, "CYRILLIC SMALL LETTER E"] +, ["iucyrillic", 1102, "CYRILLIC SMALL LETTER YU"] +, ["iacyrillic", 1103, "CYRILLIC SMALL LETTER YA"] +, ["ghemiddlehookcyrillic", 1173, "CYRILLIC SMALL LETTER GHE WITH MIDDLE HOOK"] +, ["pemiddlehookcyrillic", 1191, "CYRILLIC SMALL LETTER PE WITH MIDDLE HOOK"] +, ["djecyrillic", 1106, "CYRILLIC SMALL LETTER DJE"] +, ["tshecyrillic", 1115, "CYRILLIC SMALL LETTER TSHE"] +, ["uni0527", 1319, "CYRILLIC SMALL LETTER SHHA WITH DESCENDER"] +, ["ecyrillic", 1108, "CYRILLIC SMALL LETTER UKRAINIAN IE"] +, ["haabkhasiancyrillic", 1193, "CYRILLIC SMALL LETTER ABKHASIAN HA"] +, ["tetsecyrillic", 1205, "CYRILLIC SMALL LIGATURE TE TSE"] +, ["cheabkhasiancyrillic", 1213, "CYRILLIC SMALL LETTER ABKHASIAN CHE"] +, ["dzeabkhasiancyrillic", 1249, "CYRILLIC SMALL LETTER ABKHASIAN DZE"] +, ["izhitsacyrillic", 1141, "CYRILLIC SMALL LETTER IZHITSA"] +, ["dz", 675, "LATIN SMALL LETTER DZ DIGRAPH", ""] +, ["Dzcaron", 453, "LATIN CAPITAL LETTER D WITH SMALL LETTER Z WITH CARON"] +, ["dzcaron", 454, "LATIN SMALL LETTER DZ WITH CARON"] +, ["germandbls", 223, "LATIN SMALL LETTER SHARP S"] +, ["dotlessi", 305, "LATIN SMALL LETTER DOTLESS I"] +, ["dotlessj", 567, "LATIN SMALL LETTER DOTLESS J"] +, ["dollar", 36, "DOLLAR SIGN", "#cceff2"] +, ["cent", 162, "CENT SIGN", "#cceff2"] +, ["yen", 165, "YEN SIGN", "#cceff2"] +, ["sterling", 163, "POUND SIGN", "#cceff2"] +, ["florin", 402, "LATIN SMALL LETTER F WITH HOOK"] +, ["uni20BA", 8378, "TURKISH LIRA SIGN", "#cceff2"] +, ["uni20BD", 8381, "RUBLE SIGN", "#cceff2"] +, ["Euro", 8364, "EURO SIGN", "#cceff2"] +, ["uni20B9", 8377, "INDIAN RUPEE SIGN", "#cceff2"] +, ["tenge", 8376, 
"TENGE SIGN", "#cceff2"] +, ["peseta", 8359, "PESETA SIGN", "#cceff2"] +, ["peso", 8369, "PESO SIGN", "#cceff2"] +, ["kip", 8365, "KIP SIGN", "#cceff2"] +, ["won", 8361, "WON SIGN", "#cceff2"] +, ["lira", 8356, "LIRA SIGN", "#cceff2"] +, ["hryvnia", 8372, "HRYVNIA SIGN", "#cceff2"] +, ["naira", 8358, "NAIRA SIGN", "#cceff2"] +, ["guarani", 8370, "GUARANI SIGN", "#cceff2"] +, ["coloncurrency", 8353, "COLON SIGN", "#cceff2"] +, ["cedi", 8373, "CEDI SIGN", "#cceff2"] +, ["cruzeiro", 8354, "CRUZEIRO SIGN", "#cceff2"] +, ["tugrik", 8366, "TUGRIK SIGN", "#cceff2"] +, ["mill", 8357, "MILL SIGN", "#cceff2"] +, ["afii57636", 8362, "NEW SHEQEL SIGN", "#cceff2"] +, ["manat", 8380, "MANAT SIGN", "#cceff2"] +, ["rupee", 8360, "RUPEE SIGN", "#cceff2"] +, ["lari", 8382, "LARI SIGN", "#cceff2"] +, ["zero", 48, "DIGIT ZERO", "#e2f4ea"] +, ["one", 49, "DIGIT ONE", "#e2f4ea"] +, ["two", 50, "DIGIT TWO", "#e2f4ea"] +, ["three", 51, "DIGIT THREE", "#e2f4ea"] +, ["four", 52, "DIGIT FOUR", "#e2f4ea"] +, ["five", 53, "DIGIT FIVE", "#e2f4ea"] +, ["six", 54, "DIGIT SIX", "#e2f4ea"] +, ["seven", 55, "DIGIT SEVEN", "#e2f4ea"] +, ["eight", 56, "DIGIT EIGHT", "#e2f4ea"] +, ["nine", 57, "DIGIT NINE", "#e2f4ea"] +, ["ampersand", 38, "AMPERSAND", "#efefef"] +, ["exclam", 33, "EXCLAMATION MARK", "#efefef"] +, ["exclamdown", 161, "INVERTED EXCLAMATION MARK", "#efefef"] +, ["question", 63, "QUESTION MARK", "#efefef"] +, ["questiondown", 191, "INVERTED QUESTION MARK", "#efefef"] +, ["parenleft", 40, "LEFT PARENTHESIS", "#efefef"] +, ["parenright", 41, "RIGHT PARENTHESIS", "#efefef"] +, ["bracketleft", 91, "LEFT SQUARE BRACKET", "#efefef"] +, ["bracketright", 93, "RIGHT SQUARE BRACKET", "#efefef"] +, ["braceleft", 123, "LEFT CURLY BRACKET", "#efefef"] +, ["braceright", 125, "RIGHT CURLY BRACKET", "#efefef"] +, ["at", 64, "COMMERCIAL AT", "#efefef"] +, ["numbersign", 35, "NUMBER SIGN", "#efefef"] +, ["currency", 164, "CURRENCY SIGN", "#cceff2"] +, ["slash", 47, "SOLIDUS", "#efefef"] +, ["bar", 124, 
"VERTICAL LINE", "#cceff2"] +, ["brokenbar", 166, "BROKEN BAR", "#cceff2"] +, ["backslash", 92, "REVERSE SOLIDUS", "#efefef"] +, ["hyphenbullet", 8259, "HYPHEN BULLET", "#efefef"] +, ["hyphen", 45, "HYPHEN-MINUS", "#efefef"] +, ["endash", 8211, "EN DASH", "#efefef"] +, ["figuredash", 8210, "FIGURE DASH", "#efefef"] +, ["emdash", 8212, "EM DASH", "#efefef"] +, ["bullet", 8226, "BULLET", "#efefef"] +, ["quoteleft", 8216, "LEFT SINGLE QUOTATION MARK", "#efefef"] +, ["quoteright", 8217, "RIGHT SINGLE QUOTATION MARK", "#efefef"] +, ["primetripl1", 8244, "TRIPLE PRIME", "#efefef"] +, ["quotesingle", 39, "APOSTROPHE", "#efefef"] +, ["quotedbl", 34, "QUOTATION MARK", "#efefef"] +, ["quotedblleft", 8220, "LEFT DOUBLE QUOTATION MARK", "#efefef"] +, ["quotedblright", 8221, "RIGHT DOUBLE QUOTATION MARK", "#efefef"] +, ["quotedblbase", 8222, "DOUBLE LOW-9 QUOTATION MARK", "#efefef"] +, ["quotesinglbase", 8218, "SINGLE LOW-9 QUOTATION MARK", "#efefef"] +, ["comma", 44, "COMMA", "#efefef"] +, ["period", 46, "FULL STOP", "#efefef"] +, ["ellipsis", 8230, "HORIZONTAL ELLIPSIS", "#efefef"] +, ["colon", 58, "COLON", "#efefef"] +, ["semicolon", 59, "SEMICOLON", "#efefef"] +, ["guilsinglleft", 8249, "SINGLE LEFT-POINTING ANGLE QUOTATION MARK", "#efefef"] +, ["guilsinglright", 8250, "SINGLE RIGHT-POINTING ANGLE QUOTATION MARK", "#efefef"] +, ["guillemotleft", 171, "LEFT-POINTING DOUBLE ANGLE QUOTATION MARK", "#efefef"] +, ["guillemotright", 187, "RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK", "#efefef"] +, ["less", 60, "LESS-THAN SIGN", "#cceff2"] +, ["greater", 62, "GREATER-THAN SIGN", "#cceff2"] +, ["lessequal", 8804, "LESS-THAN OR EQUAL TO", "#cceff2"] +, ["greaterequal", 8805, "GREATER-THAN OR EQUAL TO", "#cceff2"] +, ["equal", 61, "EQUALS SIGN", "#cceff2"] +, ["notequal", 8800, "NOT EQUAL TO", "#cceff2"] +, ["plus", 43, "PLUS SIGN", "#cceff2"] +, ["minus", 8722, "MINUS SIGN", "#cceff2"] +, ["multiply", 215, "MULTIPLICATION SIGN", "#cceff2"] +, ["divide", 247, "DIVISION SIGN", 
"#cceff2"] +, ["plusminus", 177, "PLUS-MINUS SIGN", "#cceff2"] +, ["approxequal", 8776, "ALMOST EQUAL TO", "#cceff2"] +, ["asciitilde", 126, "TILDE", "#cceff2"] +, ["logicalnot", 172, "NOT SIGN", "#cceff2"] +, ["periodcentered", 183, "MIDDLE DOT", "#efefef"] +, ["anoteleia", 903, "GREEK ANO TELEIA", "#efefef"] +, ["questiongreek", 894, "GREEK QUESTION MARK", "#efefef"] +, ["underscore", 95, "LOW LINE", "#efefef"] +, ["dagger", 8224, "DAGGER", "#efefef"] +, ["daggerdbl", 8225, "DOUBLE DAGGER", "#efefef"] +, ["asciicircum", 94, "CIRCUMFLEX ACCENT", "#cceff2"] +, ["asterisk", 42, "ASTERISK", "#efefef"] +, ["registered", 174, "REGISTERED SIGN", "#cceff2"] +, ["trademark", 8482, "TRADE MARK SIGN", "#cceff2"] +, ["servicemark", 8480, "SERVICE MARK", "#cceff2"] +, ["degree", 176, "DEGREE SIGN", "#cceff2"] +, ["ordfeminine", 170, "FEMININE ORDINAL INDICATOR"] +, ["ordmasculine", 186, "MASCULINE ORDINAL INDICATOR"] +, ["isuperior", 8305, "SUPERSCRIPT LATIN SMALL LETTER I"] +, ["nsuperior", 8319, "SUPERSCRIPT LATIN SMALL LETTER N"] +, ["paragraph", 182, "PILCROW SIGN", "#efefef"] +, ["section", 167, "SECTION SIGN", "#efefef"] +, ["copyright", 169, "COPYRIGHT SIGN", "#cceff2"] +, ["infinity", 8734, "INFINITY", "#cceff2"] +, ["partialdiff", 8706, "PARTIAL DIFFERENTIAL", "#cceff2"] +, ["summation", 8721, "N-ARY SUMMATION", "#cceff2"] +, ["product", 8719, "N-ARY PRODUCT", "#cceff2"] +, ["integral", 8747, "INTEGRAL", "#cceff2"] +, ["radical", 8730, "SQUARE ROOT", "#cceff2"] +, ["lozenge", 9674, "LOZENGE", "#cceff2"] +, ["fraction", 8260, "FRACTION SLASH", "#cceff2"] +, ["zerosub", 8320, "SUBSCRIPT ZERO", "#e2f4ea"] +, ["oneinferior", 8321, "SUBSCRIPT ONE", "#e2f4ea"] +, ["twosub", 8322, "SUBSCRIPT TWO", "#e2f4ea"] +, ["threesub", 8323, "SUBSCRIPT THREE", "#e2f4ea"] +, ["foursub", 8324, "SUBSCRIPT FOUR", "#e2f4ea"] +, ["fivesub", 8325, "SUBSCRIPT FIVE", "#e2f4ea"] +, ["sixinferior", 8326, "SUBSCRIPT SIX", "#e2f4ea"] +, ["sevensub", 8327, "SUBSCRIPT SEVEN", "#e2f4ea"] +, 
["eightsub", 8328, "SUBSCRIPT EIGHT", "#e2f4ea"] +, ["nineinferior", 8329, "SUBSCRIPT NINE", "#e2f4ea"] +, ["plussub", 8330, "SUBSCRIPT PLUS SIGN", "#cceff2"] +, ["minussub", 8331, "SUBSCRIPT MINUS", "#cceff2"] +, ["equalsub", 8332, "SUBSCRIPT EQUALS SIGN", "#cceff2"] +, ["parenleftinferior", 8333, "SUBSCRIPT LEFT PARENTHESIS", "#efefef"] +, ["parenrightsub", 8334, "SUBSCRIPT RIGHT PARENTHESIS", "#efefef"] +, ["zerosuperior", 8304, "SUPERSCRIPT ZERO", "#e2f4ea"] +, ["onesuperior", 185, "SUPERSCRIPT ONE", "#e2f4ea"] +, ["twosuperior", 178, "SUPERSCRIPT TWO", "#e2f4ea"] +, ["threesuperior", 179, "SUPERSCRIPT THREE", "#e2f4ea"] +, ["foursuperior", 8308, "SUPERSCRIPT FOUR", "#e2f4ea"] +, ["fivesuperior", 8309, "SUPERSCRIPT FIVE", "#e2f4ea"] +, ["sixsuperior", 8310, "SUPERSCRIPT SIX", "#e2f4ea"] +, ["sevensuperior", 8311, "SUPERSCRIPT SEVEN", "#e2f4ea"] +, ["eightsuperior", 8312, "SUPERSCRIPT EIGHT", "#e2f4ea"] +, ["ninesuperior", 8313, "SUPERSCRIPT NINE", "#e2f4ea"] +, ["plussuperior", 8314, "SUPERSCRIPT PLUS SIGN", "#cceff2"] +, ["minussuperior", 8315, "SUPERSCRIPT MINUS", "#cceff2"] +, ["equalsuperior", 8316, "SUPERSCRIPT EQUALS SIGN", "#cceff2"] +, ["parenleftsuperior", 8317, "SUPERSCRIPT LEFT PARENTHESIS", "#efefef"] +, ["parenrightsuperior", 8318, "SUPERSCRIPT RIGHT PARENTHESIS", "#efefef"] +, ["onehalf", 189, "VULGAR FRACTION ONE HALF", "#e2f4ea"] +, ["onequarter", 188, "VULGAR FRACTION ONE QUARTER", "#e2f4ea"] +, ["threequarters", 190, "VULGAR FRACTION THREE QUARTERS", "#e2f4ea"] +, ["percent", 37, "PERCENT SIGN", "#efefef"] +, ["perthousand", 8240, "PER MILLE SIGN", "#efefef"] +, ["numero", 8470, "NUMERO SIGN", "#cceff2"] +, ["acute", 180, "ACUTE ACCENT", "#cceff2"] +, ["hungarumlaut", 733, "DOUBLE ACUTE ACCENT", "#cceff2"] +, ["grave", 96, "GRAVE ACCENT", "#cceff2"] +, ["dieresis", 168, "DIAERESIS", "#cceff2"] +, ["circumflex", 710, "MODIFIER LETTER CIRCUMFLEX ACCENT"] +, ["tilde", 732, "SMALL TILDE", "#cceff2"] +, ["macron", 175, "MACRON", "#cceff2"] +, 
["breve", 728, "BREVE", "#cceff2"] +, ["dotaccent", 729, "DOT ABOVE", "#cceff2"] +, ["ring", 730, "RING ABOVE", "#cceff2"] +, ["caron", 711, "CARON"] +, ["dieresistonos", 901, "GREEK DIALYTIKA TONOS", "#cceff2"] +, ["cedilla", 184, "CEDILLA", "#cceff2"] +, ["ogonek", 731, "OGONEK", "#cceff2"] +, ["commaaccent", 63171, "[private use F6C3]"] +, ["acutecomb", 769, "COMBINING ACUTE ACCENT", "#f9e2e2"] +, ["acutedblnosp", 779, "COMBINING DOUBLE ACUTE ACCENT", "#f9e2e2"] +, ["gravecomb", 768, "COMBINING GRAVE ACCENT", "#f9e2e2"] +, ["dblgravecmb", 783, "COMBINING DOUBLE GRAVE ACCENT", "#f9e2e2"] +, ["macroncomb", 772, "COMBINING MACRON", "#f9e2e2"] +, ["dieresisnosp", 776, "COMBINING DIAERESIS", "#f9e2e2"] +, ["brevenosp", 774, "COMBINING BREVE", "#f9e2e2"] +, ["breveinvnosp", 785, "COMBINING INVERTED BREVE", "#f9e2e2"] +, ["hook", 777, "COMBINING HOOK ABOVE", "#f9e2e2"] +, ["circumflexnosp", 770, "COMBINING CIRCUMFLEX ACCENT", "#f9e2e2"] +, ["caroncmb", 780, "COMBINING CARON", "#f9e2e2"] +, ["tildecomb", 771, "COMBINING TILDE", "#f9e2e2"] +, ["dotnosp", 775, "COMBINING DOT ABOVE", "#f9e2e2"] +, ["ringnosp", 778, "COMBINING RING ABOVE", "#f9e2e2"] +, ["hornnosp", 795, "COMBINING HORN", "#f9e2e2"] +, ["ogoneknosp", 808, "COMBINING OGONEK", "#f9e2e2"] +, ["cedillanosp", 807, "COMBINING CEDILLA", "#f9e2e2"] +, ["circumflexsubnosp", 813, "COMBINING CIRCUMFLEX ACCENT BELOW", "#f9e2e2"] +, ["brevesubnosp", 814, "COMBINING BREVE BELOW", "#f9e2e2"] +, ["dotbelow", 803, "COMBINING DOT BELOW", "#f9e2e2"] +, ["commabelowcmb", 806, "COMBINING COMMA BELOW", "#f9e2e2"] +, ["dotdblsubnosp", 804, "COMBINING DIAERESIS BELOW", "#f9e2e2"] +, ["ringsubnosp", 805, "COMBINING RING BELOW", "#f9e2e2"] +, ["underscorenosp", 818, "COMBINING LOW LINE", "#f9e2e2"] +, ["tildesubnosp", 816, "COMBINING TILDE BELOW", "#f9e2e2"] +, ["enquad", 8192, "EN QUAD", "#ede5f9"] +, ["emquad", 8193, "EM QUAD", "#ede5f9"] +, ["enspace", 8194, "EN SPACE", "#ede5f9"] +, ["emspace", 8195, "EM SPACE", "#ede5f9"] +, 
["thirdemspace", 8196, "THREE-PER-EM SPACE", "#ede5f9"] +, ["quarteremspace", 8197, "FOUR-PER-EM SPACE", "#ede5f9"] +, ["sixthemspace", 8198, "SIX-PER-EM SPACE", "#ede5f9"] +, ["figurespace", 8199, "FIGURE SPACE", "#ede5f9"] +, ["punctuationspace", 8200, "PUNCTUATION SPACE", "#ede5f9"] +, ["thinspace", 8201, "THIN SPACE", "#ede5f9"] +, ["hairspace", 8202, "HAIR SPACE", "#ede5f9"] +, ["zerowidthspace", 8203, "ZERO WIDTH SPACE", "#efefef"] +, ["arrowleft", 8592, "LEFTWARDS ARROW", "#cceff2"] +, ["arrowup", 8593, "UPWARDS ARROW", "#cceff2"] +, ["arrowright", 8594, "RIGHTWARDS ARROW", "#cceff2"] +, ["arrowdown", 8595, "DOWNWARDS ARROW", "#cceff2"] +, ["triagrt", 9658, "BLACK RIGHT-POINTING POINTER", "#cceff2"] +, ["triagdn", 9660, "BLACK DOWN-POINTING TRIANGLE", "#cceff2"] +, ["triagup", 9650, "BLACK UP-POINTING TRIANGLE", "#cceff2"] +, ["blackleftpointingtriangle", 9664, "BLACK LEFT-POINTING TRIANGLE", "#cceff2"] +, ["mu", 181, "MICRO SIGN"] +, ["uni2009.fra", null, null, "#dddddd"] +, ["overscoredblnosp", 831, "COMBINING DOUBLE OVERLINE", "#f9e2e2"] +, ["uni215A", 8538, "VULGAR FRACTION FIVE SIXTHS", "#e2f4ea"] +, ["uni02E5_uni02E7_uni02E8", null, null, "#dddddd"] +, ["uni215F", 8543, "FRACTION NUMERATOR ONE", "#e2f4ea"] +, ["tturn", 647, "LATIN SMALL LETTER TURNED T"] +, ["uni2184.cn", null, null, "#c4f2c1"] +, ["uni2DEA", 11754, "COMBINING CYRILLIC LETTER O", "#f9e2e2"] +, ["uni2DEB", 11755, "COMBINING CYRILLIC LETTER PE", "#f9e2e2"] +, ["uni2DEC", 11756, "COMBINING CYRILLIC LETTER ER", "#f9e2e2"] +, ["uni2DED", 11757, "COMBINING CYRILLIC LETTER ES", "#f9e2e2"] +, ["uni2DEE", 11758, "COMBINING CYRILLIC LETTER TE", "#f9e2e2"] +, ["macronacutecomb", null, null, "#dddddd"] +, ["uni1D91", 7569, "LATIN SMALL LETTER D WITH HOOK AND TAIL"] +, ["uni1D90", 7568, "LATIN SMALL LETTER ALPHA WITH RETROFLEX HOOK"] +, ["uni1D93", 7571, "LATIN SMALL LETTER OPEN E WITH RETROFLEX HOOK"] +, ["uni1D92", 7570, "LATIN SMALL LETTER E WITH RETROFLEX HOOK"] +, ["uni1D95", 7573, "LATIN 
SMALL LETTER SCHWA WITH RETROFLEX HOOK"] +, ["uni1D94", 7572, "LATIN SMALL LETTER REVERSED OPEN E WITH RETROFLEX HOOK"] +, ["uni1D97", 7575, "LATIN SMALL LETTER OPEN O WITH RETROFLEX HOOK"] +, ["uni1D96", 7574, "LATIN SMALL LETTER I WITH RETROFLEX HOOK"] +, ["uni1D99", 7577, "LATIN SMALL LETTER U WITH RETROFLEX HOOK"] +, ["uni1D98", 7576, "LATIN SMALL LETTER ESH WITH RETROFLEX HOOK"] +, ["uniA659", 42585, "CYRILLIC SMALL LETTER CLOSED LITTLE YUS"] +, ["uniA658", 42584, "CYRILLIC CAPITAL LETTER CLOSED LITTLE YUS"] +, ["crossbar", null, null, "#dddddd"] +, ["uniA653", 42579, "CYRILLIC SMALL LETTER IOTIFIED YAT"] +, ["uniA652", 42578, "CYRILLIC CAPITAL LETTER IOTIFIED YAT"] +, ["austral", 8371, "AUSTRAL SIGN", "#cceff2"] +, ["uniA651", 42577, "CYRILLIC SMALL LETTER YERU WITH BACK YER"] +, ["uniA650", 42576, "CYRILLIC CAPITAL LETTER YERU WITH BACK YER"] +, ["uniA657", 42583, "CYRILLIC SMALL LETTER IOTIFIED A"] +, ["uniA656", 42582, "CYRILLIC CAPITAL LETTER IOTIFIED A"] +, ["uniA655", 42581, "CYRILLIC SMALL LETTER REVERSED YU"] +, ["uniA65C", 42588, "CYRILLIC CAPITAL LETTER IOTIFIED CLOSED LITTLE YUS"] +, ["uniA65B", 42587, "CYRILLIC SMALL LETTER BLENDED YUS"] +, ["uniA65A", 42586, "CYRILLIC CAPITAL LETTER BLENDED YUS"] +, ["uniA65F", 42591, "CYRILLIC SMALL LETTER YN"] +, ["uniA65E", 42590, "CYRILLIC CAPITAL LETTER YN"] +, ["uniA65D", 42589, "CYRILLIC SMALL LETTER IOTIFIED CLOSED LITTLE YUS"] +, ["uni1D9A", 7578, "LATIN SMALL LETTER EZH WITH RETROFLEX HOOK"] +, ["uni1D9C", 7580, "MODIFIER LETTER SMALL C"] +, ["uni1D9B", 7579, "MODIFIER LETTER SMALL TURNED ALPHA"] +, ["uni006A.ccmp", null, null, "#dddddd"] +, ["uni1D9D", 7581, "MODIFIER LETTER SMALL C WITH CURL"] +, ["uni1D9F", 7583, "MODIFIER LETTER SMALL REVERSED OPEN E"] +, ["acutedotcomb.lc", null, null, "#dddddd"] +, ["uni2DE9", 11753, "COMBINING CYRILLIC LETTER EN", "#f9e2e2"] +, ["tildedieresiscomb.lc", null, null, "#dddddd"] +, ["uni2DE0", 11744, "COMBINING CYRILLIC LETTER BE", "#f9e2e2"] +, ["uni2DE1", 11745, 
"COMBINING CYRILLIC LETTER VE", "#f9e2e2"] +, ["uni2DE2", 11746, "COMBINING CYRILLIC LETTER GHE", "#f9e2e2"] +, ["uni2DE3", 11747, "COMBINING CYRILLIC LETTER DE", "#f9e2e2"] +, ["uni2DE4", 11748, "COMBINING CYRILLIC LETTER ZHE", "#f9e2e2"] +, ["uni2DE5", 11749, "COMBINING CYRILLIC LETTER ZE", "#f9e2e2"] +, ["uni2DE6", 11750, "COMBINING CYRILLIC LETTER KA", "#f9e2e2"] +, ["uni2DE7", 11751, "COMBINING CYRILLIC LETTER EL", "#f9e2e2"] +, ["drthook", 598, "LATIN SMALL LETTER D WITH TAIL"] +, ["qhook", 672, "LATIN SMALL LETTER Q WITH HOOK"] +, ["underscoredbl", 8215, "DOUBLE LOW LINE", "#efefef"] +, ["uni2151", 8529, "VULGAR FRACTION ONE NINTH", "#e2f4ea"] +, ["uni2150", 8528, "VULGAR FRACTION ONE SEVENTH", "#e2f4ea"] +, ["uni2152", 8530, "VULGAR FRACTION ONE TENTH", "#e2f4ea"] +, ["uni2155", 8533, "VULGAR FRACTION ONE FIFTH", "#e2f4ea"] +, ["romanone", 8544, "ROMAN NUMERAL ONE", "#e2f4ea"] +, ["uni2157", 8535, "VULGAR FRACTION THREE FIFTHS", "#e2f4ea"] +, ["uni2156", 8534, "VULGAR FRACTION TWO FIFTHS", "#e2f4ea"] +, ["nrthook", 627, "LATIN SMALL LETTER N WITH RETROFLEX HOOK"] +, ["estimated", 8494, "ESTIMATED SYMBOL", "#cceff2"] +, ["hturn", 613, "LATIN SMALL LETTER TURNED H"] +, ["degreecentigrade", 8451, "DEGREE CELSIUS", "#cceff2"] +, ["dieresisacutecomb.lc", null, null, "#dddddd"] +, ["uni2DEF", 11759, "COMBINING CYRILLIC LETTER HA", "#f9e2e2"] +, ["uniAB59", 43865, "LATIN SMALL LETTER X WITH LONG LEFT LEG WITH SERIF"] +, ["uniAB58", 43864, "LATIN SMALL LETTER X WITH LONG LEFT LEG AND LOW RIGHT RING"] +, ["uniAB57", 43863, "LATIN SMALL LETTER X WITH LONG LEFT LEG"] +, ["uniAB56", 43862, "LATIN SMALL LETTER X WITH LOW RIGHT RING"] +, ["Gbar", 484, "LATIN CAPITAL LETTER G WITH STROKE"] +, ["uniAB54", 43860, "LATIN SMALL LETTER CHI WITH LOW RIGHT RING"] +, ["uniAB53", 43859, "LATIN SMALL LETTER CHI"] +, ["uniAB52", 43858, "LATIN SMALL LETTER U WITH LEFT HOOK"] +, ["uniAB51", 43857, "LATIN SMALL LETTER TURNED UI"] +, ["uniAB50", 43856, "LATIN SMALL LETTER UI"] +, 
["carondotcomb.lc", null, null, "#dddddd"] +, ["uni02E6_uni02E9_uni02E6", null, null, "#dddddd"] +, ["uniAB5F", 43871, "MODIFIER LETTER SMALL U WITH LEFT HOOK"] +, ["uniAB5E", 43870, "MODIFIER LETTER SMALL L WITH MIDDLE TILDE"] +, ["uniAB5D", 43869, "MODIFIER LETTER SMALL L WITH INVERTED LAZY S"] +, ["uniAB5C", 43868, "MODIFIER LETTER SMALL HENG"] +, ["uniAB5B", 43867, "MODIFIER BREVE WITH INVERTED BREVE", "#cceff2"] +, ["uniAB5A", 43866, "LATIN SMALL LETTER Y WITH SHORT RIGHT LEG"] +, ["uni02E7_uni02E6_uni02E6", null, null, "#dddddd"] +, ["aturn_uni02DE", null, null, "#dddddd"] +, ["uni02E9_uni02E5_uni02E5", null, null, "#dddddd"] +, ["uni02E9_uni02E5_uni02E6", null, null, "#dddddd"] +, ["uni02E9_uni02E5_uni02E7", null, null, "#dddddd"] +, ["uni1FFD", 8189, "GREEK OXIA", "#cceff2"] +, ["uni1FFE", 8190, "GREEK DASIA", "#cceff2"] +, ["uni02E9_uni02E5_uni02E8", null, null, "#dddddd"] +, ["uni02E9_uni02E5_uni02E9", null, null, "#dddddd"] +, ["uni02E6_uni02E9_uni02E8", null, null, "#dddddd"] +, ["oopen", 596, "LATIN SMALL LETTER OPEN O"] +, ["Gsmallcaphook", 667, "LATIN LETTER SMALL CAPITAL G WITH HOOK"] +, ["uni0346", 838, "COMBINING BRIDGE ABOVE", "#f9e2e2"] +, ["anglesupnosp", 794, "COMBINING LEFT ANGLE ABOVE", "#f9e2e2"] +, ["uniFFFC", 65532, "OBJECT REPLACEMENT CHARACTER", "#cceff2"] +, ["uniFFFD", 65533, "REPLACEMENT CHARACTER", "#cceff2"] +, ["uni02E7_uni02E9", null, null, "#dddddd"] +, ["uni02E7_uni02E8", null, null, "#dddddd"] +, ["uni02E7_uni02E5", null, null, "#dddddd"] +, ["uni02E7_uni02E6", null, null, "#dddddd"] +, ["tildesubnosp_uni1ABD", null, null, "#dddddd"] +, ["uniA794.cn", null, null, "#c4f2c1"] +, ["ringacute", null, null, "#dddddd"] +, ["ubar", 649, "LATIN SMALL LETTER U BAR"] +, ["uni2C7A.cn", null, null, "#c4f2c1"] +, ["uni2E3F", 11839, "CAPITULUM", "#efefef"] +, ["uni1DD8", 7640, "COMBINING LATIN SMALL LETTER INSULAR D", "#f9e2e2"] +, ["uni1DD9", 7641, "COMBINING LATIN SMALL LETTER ETH", "#f9e2e2"] +, ["uniA73D", 42813, "LATIN SMALL LETTER 
AY"] +, ["uniA73E", 42814, "LATIN CAPITAL LETTER REVERSED C WITH DOT"] +, ["uniA73F", 42815, "LATIN SMALL LETTER REVERSED C WITH DOT"] +, ["uni1DD2", 7634, "COMBINING US ABOVE", "#f9e2e2"] +, ["uni1DD3", 7635, "COMBINING LATIN SMALL LETTER FLATTENED OPEN A ABOVE", "#f9e2e2"] +, ["uni1DD0", 7632, "COMBINING IS BELOW", "#f9e2e2"] +, ["uni1DD1", 7633, "COMBINING UR ABOVE", "#f9e2e2"] +, ["circumflexgravecomb", null, null, "#dddddd"] +, ["uni1DD4", 7636, "COMBINING LATIN SMALL LETTER AE", "#f9e2e2"] +, ["uni1DD5", 7637, "COMBINING LATIN SMALL LETTER AO", "#f9e2e2"] +, ["dieresiscaroncomb", null, null, "#dddddd"] +, ["eng", 331, "LATIN SMALL LETTER ENG"] +, ["breveinvsubnosp", 815, "COMBINING INVERTED BREVE BELOW", "#f9e2e2"] +, ["uni02E9_uni02E9_uni02E5", null, null, "#dddddd"] +, ["uni02E9_uni02E9_uni02E6", null, null, "#dddddd"] +, ["uni02E9_uni02E9_uni02E7", null, null, "#dddddd"] +, ["commaturnsupnosp", 786, "COMBINING TURNED COMMA ABOVE", "#f9e2e2"] +, ["uniA730", 42800, "LATIN LETTER SMALL CAPITAL F"] +, ["uniA731", 42801, "LATIN LETTER SMALL CAPITAL S"] +, ["uniA732", 42802, "LATIN CAPITAL LETTER AA"] +, ["uniA733", 42803, "LATIN SMALL LETTER AA"] +, ["uniA734", 42804, "LATIN CAPITAL LETTER AO"] +, ["uniA735", 42805, "LATIN SMALL LETTER AO"] +, ["uniA736", 42806, "LATIN CAPITAL LETTER AU"] +, ["uniA737", 42807, "LATIN SMALL LETTER AU"] +, ["uni1DDB", 7643, "COMBINING LATIN LETTER SMALL CAPITAL G", "#f9e2e2"] +, ["uni1DDC", 7644, "COMBINING LATIN SMALL LETTER K", "#f9e2e2"] +, ["ringrighthalfsubnosp", 825, "COMBINING RIGHT HALF RING BELOW", "#f9e2e2"] +, ["uni1DDF", 7647, "COMBINING LATIN LETTER SMALL CAPITAL M", "#f9e2e2"] +, ["uni1DDD", 7645, "COMBINING LATIN SMALL LETTER L", "#f9e2e2"] +, ["uni1DDE", 7646, "COMBINING LATIN LETTER SMALL CAPITAL L", "#f9e2e2"] +, ["romanten", 8553, "ROMAN NUMERAL TEN", "#e2f4ea"] +, ["degreefarenheit", 8457, "DEGREE FAHRENHEIT", "#cceff2"] +, ["uni1D9E", 7582, "MODIFIER LETTER SMALL ETH"] +, ["Upsilon2", 433, "LATIN CAPITAL 
LETTER UPSILON"] +, ["degreekelvin", 8490, "KELVIN SIGN"] +, ["uniA64A", 42570, "CYRILLIC CAPITAL LETTER MONOGRAPH UK"] +, ["uniA64B", 42571, "CYRILLIC SMALL LETTER MONOGRAPH UK"] +, ["uniA64C", 42572, "CYRILLIC CAPITAL LETTER BROAD OMEGA"] +, ["uniA64D", 42573, "CYRILLIC SMALL LETTER BROAD OMEGA"] +, ["uniA64E", 42574, "CYRILLIC CAPITAL LETTER NEUTRAL YER"] +, ["uniA64F", 42575, "CYRILLIC SMALL LETTER NEUTRAL YER"] +, ["uni1D8F", 7567, "LATIN SMALL LETTER A WITH RETROFLEX HOOK"] +, ["uni1D8D", 7565, "LATIN SMALL LETTER X WITH PALATAL HOOK"] +, ["uni1D8E", 7566, "LATIN SMALL LETTER Z WITH PALATAL HOOK"] +, ["uni1D8B", 7563, "LATIN SMALL LETTER ESH WITH PALATAL HOOK"] +, ["uni1D8C", 7564, "LATIN SMALL LETTER V WITH PALATAL HOOK"] +, ["uni1D8A", 7562, "LATIN SMALL LETTER S WITH PALATAL HOOK"] +, ["uni02AE", 686, "LATIN SMALL LETTER TURNED H WITH FISHHOOK"] +, ["uni02AB", 683, "LATIN SMALL LETTER LZ DIGRAPH"] +, ["uni02AC", 684, "LATIN LETTER BILABIAL PERCUSSIVE"] +, ["uni1FCD", 8141, "GREEK PSILI AND VARIA", "#cceff2"] +, ["uni2DE8", 11752, "COMBINING CYRILLIC LETTER EM", "#f9e2e2"] +, ["kappa1", 1008, "GREEK KAPPA SYMBOL"] +, ["uni02A9", 681, "LATIN SMALL LETTER FENG DIGRAPH"] +, ["onethird", 8531, "VULGAR FRACTION ONE THIRD", "#e2f4ea"] +, ["uni1D88", 7560, "LATIN SMALL LETTER P WITH PALATAL HOOK"] +, ["uni1D89", 7561, "LATIN SMALL LETTER R WITH PALATAL HOOK"] +, ["uni1D86", 7558, "LATIN SMALL LETTER M WITH PALATAL HOOK"] +, ["uni1D87", 7559, "LATIN SMALL LETTER N WITH PALATAL HOOK"] +, ["uni1D84", 7556, "LATIN SMALL LETTER K WITH PALATAL HOOK"] +, ["uni1D85", 7557, "LATIN SMALL LETTER L WITH PALATAL HOOK"] +, ["uni1D82", 7554, "LATIN SMALL LETTER F WITH PALATAL HOOK"] +, ["vturn", 652, "LATIN SMALL LETTER TURNED V"] +, ["uni1D81", 7553, "LATIN SMALL LETTER D WITH PALATAL HOOK"] +, ["uniA648", 42568, "CYRILLIC CAPITAL LETTER DJERV"] +, ["uniA649", 42569, "CYRILLIC SMALL LETTER DJERV"] +, ["uniA640", 42560, "CYRILLIC CAPITAL LETTER ZEMLYA"] +, ["uniA641", 42561, 
"CYRILLIC SMALL LETTER ZEMLYA"] +, ["uniA642", 42562, "CYRILLIC CAPITAL LETTER DZELO"] +, ["uniA643", 42563, "CYRILLIC SMALL LETTER DZELO"] +, ["uniA644", 42564, "CYRILLIC CAPITAL LETTER REVERSED DZE"] +, ["uniA645", 42565, "CYRILLIC SMALL LETTER REVERSED DZE"] +, ["uniA646", 42566, "CYRILLIC CAPITAL LETTER IOTA"] +, ["uniA647", 42567, "CYRILLIC SMALL LETTER IOTA"] +, ["uniAB4D", 43853, "LATIN SMALL LETTER BASELINE ESH"] +, ["uniAB4E", 43854, "LATIN SMALL LETTER U WITH SHORT RIGHT LEG"] +, ["uniAB4F", 43855, "LATIN SMALL LETTER U BAR WITH SHORT RIGHT LEG"] +, ["uniAB4A", 43850, "LATIN SMALL LETTER DOUBLE R WITH CROSSED-TAIL"] +, ["uniAB4B", 43851, "LATIN SMALL LETTER SCRIPT R"] +, ["uniAB4C", 43852, "LATIN SMALL LETTER SCRIPT R WITH RING"] +, ["uni2DF5", 11765, "COMBINING CYRILLIC LETTER ES-TE", "#f9e2e2"] +, ["uni2DF4", 11764, "COMBINING CYRILLIC LETTER FITA", "#f9e2e2"] +, ["uni2DF7", 11767, "COMBINING CYRILLIC LETTER IE", "#f9e2e2"] +, ["uni2DF6", 11766, "COMBINING CYRILLIC LETTER A", "#f9e2e2"] +, ["uni2DF1", 11761, "COMBINING CYRILLIC LETTER CHE", "#f9e2e2"] +, ["uni2DF0", 11760, "COMBINING CYRILLIC LETTER TSE", "#f9e2e2"] +, ["uni2DF3", 11763, "COMBINING CYRILLIC LETTER SHCHA", "#f9e2e2"] +, ["uni2DF2", 11762, "COMBINING CYRILLIC LETTER SHA", "#f9e2e2"] +, ["uni2DF9", 11769, "COMBINING CYRILLIC LETTER MONOGRAPH UK", "#f9e2e2"] +, ["uni2DF8", 11768, "COMBINING CYRILLIC LETTER DJERV", "#f9e2e2"] +, ["uni2159", 8537, "VULGAR FRACTION ONE SIXTH", "#e2f4ea"] +, ["uni2158", 8536, "VULGAR FRACTION FOUR FIFTHS", "#e2f4ea"] +, ["uniA68B", 42635, "CYRILLIC SMALL LETTER TE WITH MIDDLE HOOK"] +, ["uniA68C", 42636, "CYRILLIC CAPITAL LETTER TWE"] +, ["uni214F", 8527, "SYMBOL FOR SAMARITAN SOURCE", "#cceff2"] +, ["uni214D", 8525, "AKTIESELSKAB", "#cceff2"] +, ["OEsmallcap", 630, "LATIN LETTER SMALL CAPITAL OE"] +, ["tackupsubnosp", 797, "COMBINING UP TACK BELOW", "#f9e2e2"] +, ["uni2DFE", 11774, "COMBINING CYRILLIC LETTER BIG YUS", "#f9e2e2"] +, ["uni2DFD", 11773, 
"COMBINING CYRILLIC LETTER LITTLE YUS", "#f9e2e2"] +, ["uni2DFF", 11775, "COMBINING CYRILLIC LETTER IOTIFIED BIG YUS", "#f9e2e2"] +, ["uni2DFA", 11770, "COMBINING CYRILLIC LETTER YAT", "#f9e2e2"] +, ["uni2DFC", 11772, "COMBINING CYRILLIC LETTER IOTIFIED A", "#f9e2e2"] +, ["uni2DFB", 11771, "COMBINING CYRILLIC LETTER YU", "#f9e2e2"] +, ["uniAB48", 43848, "LATIN SMALL LETTER DOUBLE R"] +, ["uniAB49", 43849, "LATIN SMALL LETTER R WITH CROSSED-TAIL"] +, ["uniAB44", 43844, "LATIN SMALL LETTER TURNED O OPEN-O WITH STROKE"] +, ["uniAB45", 43845, "LATIN SMALL LETTER STIRRUP R"] +, ["uniAB46", 43846, "LATIN LETTER SMALL CAPITAL R WITH RIGHT LEG"] +, ["uniAB47", 43847, "LATIN SMALL LETTER R WITHOUT HANDLE"] +, ["uniAB40", 43840, "LATIN SMALL LETTER INVERTED OE"] +, ["uniAB41", 43841, "LATIN SMALL LETTER TURNED OE WITH STROKE"] +, ["glottalstopbarinv", 446, "LATIN LETTER INVERTED GLOTTAL STOP WITH STROKE"] +, ["uniAB43", 43843, "LATIN SMALL LETTER TURNED O OPEN-O"] +, ["seagullsubnosp", 828, "COMBINING SEAGULL BELOW", "#f9e2e2"] +, ["uni02EC", 748, "MODIFIER LETTER VOICING"] +, ["afii300", 8207, "RIGHT-TO-LEFT MARK", "#efefef"] +, ["caronnosp", null, null, "#dddddd"] +, ["uni02E7_uni02E9_uni02E8", null, null, "#dddddd"] +, ["uni02E7_uni02E9_uni02E9", null, null, "#dddddd"] +, ["uni02E7_uni02E9_uni02E6", null, null, "#dddddd"] +, ["uni02E7_uni02E9_uni02E7", null, null, "#dddddd"] +, ["uni02E7_uni02E9_uni02E5", null, null, "#dddddd"] +, ["romanfour", 8547, "ROMAN NUMERAL FOUR", "#e2f4ea"] +, ["uniA744", 42820, "LATIN CAPITAL LETTER K WITH STROKE AND DIAGONAL STROKE"] +, ["macrongravecomb", null, null, "#dddddd"] +, ["lmidtilde", 619, "LATIN SMALL LETTER L WITH MIDDLE TILDE"] +, ["uni03F5", 1013, "GREEK LUNATE EPSILON SYMBOL"] +, ["uni03F6", 1014, "GREEK REVERSED LUNATE EPSILON SYMBOL", "#cceff2"] +, ["longs", 383, "LATIN SMALL LETTER LONG S"] +, ["Ghook.cn", null, null, "#c4f2c1"] +, ["tonebarextrahighmod", 741, "MODIFIER LETTER EXTRA-HIGH TONE BAR", "#cceff2"] +, 
["ringsubnosp_uni1ABD", null, null, "#dddddd"] +, ["one.sup", null, null, "#dddddd"] +, ["dieresismacroncomb", null, null, "#dddddd"] +, ["trianglebullet", 8227, "TRIANGULAR BULLET", "#efefef"] +, ["uni03FF.cn", null, null, "#c4f2c1"] +, ["uniA74F", 42831, "LATIN SMALL LETTER OO"] +, ["Ysmallcap", 655, "LATIN LETTER SMALL CAPITAL Y"] +, ["rrthook", 637, "LATIN SMALL LETTER R WITH TAIL"] +, ["uni03FB", 1019, "GREEK SMALL LETTER SAN"] +, ["uni03FC", 1020, "GREEK RHO WITH STROKE SYMBOL"] +, ["uni03FD", 1021, "GREEK CAPITAL REVERSED LUNATE SIGMA SYMBOL"] +, ["uni03FE", 1022, "GREEK CAPITAL DOTTED LUNATE SIGMA SYMBOL"] +, ["uni03FF", 1023, "GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL"] +, ["iota1_uni02DE", null, null, "#dddddd"] +, ["uni0373", 883, "GREEK SMALL LETTER ARCHAIC SAMPI"] +, ["uni0372", 882, "GREEK CAPITAL LETTER ARCHAIC SAMPI"] +, ["uni0371", 881, "GREEK SMALL LETTER HETA"] +, ["uni0370", 880, "GREEK CAPITAL LETTER HETA"] +, ["uni0377", 887, "GREEK SMALL LETTER PAMPHYLIAN DIGAMMA"] +, ["uni0376", 886, "GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA"] +, ["mturndescend", 624, "LATIN SMALL LETTER TURNED M WITH LONG LEG"] +, ["uniA709", 42761, "MODIFIER LETTER HIGH DOTTED TONE BAR", "#cceff2"] +, ["uniA708", 42760, "MODIFIER LETTER EXTRA-HIGH DOTTED TONE BAR", "#cceff2"] +, ["uniA705", 42757, "MODIFIER LETTER CHINESE TONE YANG QU", "#cceff2"] +, ["uniA704", 42756, "MODIFIER LETTER CHINESE TONE YIN QU", "#cceff2"] +, ["uniA707", 42759, "MODIFIER LETTER CHINESE TONE YANG RU", "#cceff2"] +, ["uniA706", 42758, "MODIFIER LETTER CHINESE TONE YIN RU", "#cceff2"] +, ["uniA701", 42753, "MODIFIER LETTER CHINESE TONE YANG PING", "#cceff2"] +, ["uniA700", 42752, "MODIFIER LETTER CHINESE TONE YIN PING", "#cceff2"] +, ["uniA703", 42755, "MODIFIER LETTER CHINESE TONE YANG SHANG", "#cceff2"] +, ["uniA702", 42754, "MODIFIER LETTER CHINESE TONE YIN SHANG", "#cceff2"] +, ["gravesubnosp", 790, "COMBINING GRAVE ACCENT BELOW", "#f9e2e2"] +, ["seveneighths", 8542, "VULGAR FRACTION 
SEVEN EIGHTHS", "#e2f4ea"] +, ["undertie", 8255, "UNDERTIE", "#efefef"] +, ["dotbelow_uni1ABD", null, null, "#dddddd"] +, ["barmidshortnosp", 821, "COMBINING SHORT STROKE OVERLAY", "#f9e2e2"] +, ["glottalstoprevinv", 661, "LATIN LETTER PHARYNGEAL VOICED FRICATIVE"] +, ["uni023D", 573, "LATIN CAPITAL LETTER L WITH BAR"] +, ["uniA70E", 42766, "MODIFIER LETTER HIGH DOTTED LEFT-STEM TONE BAR", "#cceff2"] +, ["uniA70F", 42767, "MODIFIER LETTER MID DOTTED LEFT-STEM TONE BAR", "#cceff2"] +, ["uniA70A", 42762, "MODIFIER LETTER MID DOTTED TONE BAR", "#cceff2"] +, ["uniAB55", 43861, "LATIN SMALL LETTER CHI WITH LOW LEFT SERIF"] +, ["uniA70C", 42764, "MODIFIER LETTER EXTRA-LOW DOTTED TONE BAR", "#cceff2"] +, ["uniA689", 42633, "CYRILLIC SMALL LETTER DZZE"] +, ["uni037C", 892, "GREEK SMALL DOTTED LUNATE SIGMA SYMBOL"] +, ["uni037B", 891, "GREEK SMALL REVERSED LUNATE SIGMA SYMBOL"] +, ["uni037D", 893, "GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL"] +, ["uni03FD.cn", null, null, "#c4f2c1"] +, ["srthook.cn", null, null, "#c4f2c1"] +, ["interrobang", 8253, "INTERROBANG", "#efefef"] +, ["primemod", 697, "MODIFIER LETTER PRIME"] +, ["uni213B", 8507, "FACSIMILE SIGN", "#cceff2"] +, ["uniA675", 42613, "COMBINING CYRILLIC LETTER I", "#f9e2e2"] +, ["uniA674", 42612, "COMBINING CYRILLIC LETTER UKRAINIAN IE", "#f9e2e2"] +, ["uniA677", 42615, "COMBINING CYRILLIC LETTER U", "#f9e2e2"] +, ["uniA676", 42614, "COMBINING CYRILLIC LETTER YI", "#f9e2e2"] +, ["erev", 600, "LATIN SMALL LETTER REVERSED E"] +, ["uniA670", 42608, "COMBINING CYRILLIC TEN MILLIONS SIGN", "#f9e2e2"] +, ["uniA673", 42611, "SLAVONIC ASTERISK", "#efefef"] +, ["uniA672", 42610, "COMBINING CYRILLIC THOUSAND MILLIONS SIGN", "#f9e2e2"] +, ["uniA679", 42617, "COMBINING CYRILLIC LETTER YERU", "#f9e2e2"] +, ["uniA678", 42616, "COMBINING CYRILLIC LETTER HARD SIGN", "#f9e2e2"] +, ["uniA71D", 42781, "MODIFIER LETTER RAISED EXCLAMATION MARK"] +, ["uni02E9_uni02E6_uni02E9", null, null, "#dddddd"] +, ["uni02E9_uni02E6_uni02E8", null, 
null, "#dddddd"] +, ["uni02E9_uni02E6_uni02E7", null, null, "#dddddd"] +, ["uni02E9_uni02E6_uni02E6", null, null, "#dddddd"] +, ["uni02E9_uni02E6_uni02E5", null, null, "#dddddd"] +, ["cstretch", 663, "LATIN LETTER STRETCHED C"] +, ["uniA67E", 42622, "CYRILLIC KAVYKA", "#efefef"] +, ["uniA67D", 42621, "COMBINING CYRILLIC PAYEROK", "#f9e2e2"] +, ["uniA67F", 42623, "CYRILLIC PAYEROK"] +, ["uniA67A", 42618, "COMBINING CYRILLIC LETTER SOFT SIGN", "#f9e2e2"] +, ["uniA67C", 42620, "COMBINING CYRILLIC KAVYKA", "#f9e2e2"] +, ["uniA67B", 42619, "COMBINING CYRILLIC LETTER OMEGA", "#f9e2e2"] +, ["Btopbar", 386, "LATIN CAPITAL LETTER B WITH TOPBAR"] +, ["Koppa", 990, "GREEK LETTER KOPPA"] +, ["uni02E8_uni02E6_uni02E8", null, null, "#dddddd"] +, ["uni02E8_uni02E6_uni02E9", null, null, "#dddddd"] +, ["uni02E8_uni02E6_uni02E6", null, null, "#dddddd"] +, ["uni02E8_uni02E6_uni02E7", null, null, "#dddddd"] +, ["commaturnedmod", 699, "MODIFIER LETTER TURNED COMMA"] +, ["Thook", 428, "LATIN CAPITAL LETTER T WITH HOOK"] +, ["commareversedmod", 701, "MODIFIER LETTER REVERSED COMMA"] +, ["ringhalfleft", 703, "MODIFIER LETTER LEFT HALF RING"] +, ["Rsmallcap", 640, "LATIN LETTER SMALL CAPITAL R"] +, ["five.frac", null, null, "#dddddd"] +, ["Tonesix", 388, "LATIN CAPITAL LETTER TONE SIX"] +, ["tavdages", 738, "MODIFIER LETTER SMALL S"] +, ["beta1", 976, "GREEK BETA SYMBOL"] +, ["uni2046", 8262, "RIGHT SQUARE BRACKET WITH QUILL", "#efefef"] +, ["uni2045", 8261, "LEFT SQUARE BRACKET WITH QUILL", "#efefef"] +, ["uni204F", 8271, "REVERSED SEMICOLON", "#efefef"] +, ["uni204E", 8270, "LOW ASTERISK", "#efefef"] +, ["uni204D", 8269, "BLACK RIGHTWARDS BULLET", "#efefef"] +, ["uni204C", 8268, "BLACK LEFTWARDS BULLET", "#efefef"] +, ["uni204B", 8267, "REVERSED PILCROW SIGN", "#efefef"] +, ["uni204A", 8266, "TIRONIAN SIGN ET", "#efefef"] +, ["jsuper", 690, "MODIFIER LETTER SMALL J"] +, ["epsilon1", 603, "LATIN SMALL LETTER OPEN E"] +, ["CR", 13, "[Control]", "#ff4c4c"] +, ["uni02E7_uni02E7_uni02E5", 
null, null, "#dddddd"] +, ["uni02E7_uni02E7_uni02E6", null, null, "#dddddd"] +, ["uni02E7_uni02E7_uni02E8", null, null, "#dddddd"] +, ["uni02E7_uni02E7_uni02E9", null, null, "#dddddd"] +, ["slashlongnosp", 824, "COMBINING LONG SOLIDUS OVERLAY", "#f9e2e2"] +, ["uni02E6_uni02E8_uni02E8", null, null, "#dddddd"] +, ["uni02E6_uni02E8_uni02E9", null, null, "#dddddd"] +, ["uni02E6_uni02E8_uni02E6", null, null, "#dddddd"] +, ["uni02E6_uni02E8_uni02E7", null, null, "#dddddd"] +, ["uni02E6_uni02E8_uni02E5", null, null, "#dddddd"] +, ["uni02E9_uni02E8", null, null, "#dddddd"] +, ["uni1D10.cn", null, null, "#c4f2c1"] +, ["uni02E9_uni02E7", null, null, "#dddddd"] +, ["uni02E9_uni02E6", null, null, "#dddddd"] +, ["uni02E9_uni02E5", null, null, "#dddddd"] +, ["exclam1", 451, "LATIN LETTER RETROFLEX CLICK"] +, ["uniA771.cn", null, null, "#c4f2c1"] +, ["q.cn", null, null, "#c4f2c1"] +, ["Digamma", 988, "GREEK LETTER DIGAMMA"] +, ["candrabindunosp", 784, "COMBINING CANDRABINDU", "#f9e2e2"] +, ["khook", 409, "LATIN SMALL LETTER K WITH HOOK"] +, ["qofdagesh", 735, "MODIFIER LETTER CROSS ACCENT", "#cceff2"] +, ["uniA71B", 42779, "MODIFIER LETTER RAISED UP ARROW"] +, ["uniA71C", 42780, "MODIFIER LETTER RAISED DOWN ARROW"] +, ["uniA71A", 42778, "MODIFIER LETTER LOWER RIGHT CORNER ANGLE"] +, ["uniA71F", 42783, "MODIFIER LETTER LOW INVERTED EXCLAMATION MARK"] +, ["uniA71E", 42782, "MODIFIER LETTER RAISED INVERTED EXCLAMATION MARK"] +, ["uni036A", 874, "COMBINING LATIN SMALL LETTER H", "#f9e2e2"] +, ["uni036B", 875, "COMBINING LATIN SMALL LETTER M", "#f9e2e2"] +, ["uni036C", 876, "COMBINING LATIN SMALL LETTER R", "#f9e2e2"] +, ["uni036D", 877, "COMBINING LATIN SMALL LETTER T", "#f9e2e2"] +, ["uni036E", 878, "COMBINING LATIN SMALL LETTER V", "#f9e2e2"] +, ["uni036F", 879, "COMBINING LATIN SMALL LETTER X", "#f9e2e2"] +, ["uni0249.ccmp", null, null, "#dddddd"] +, ["uni20B7", 8375, "SPESMILO SIGN", "#cceff2"] +, ["uni20B6", 8374, "LIVRE TOURNOIS SIGN", "#cceff2"] +, ["uni20B0", 8368, "GERMAN 
PENNY SIGN", "#cceff2"] +, ["uni20BB", 8379, "NORDIC MARK SIGN", "#cceff2"] +, ["tildedoublecmb", 864, "COMBINING DOUBLE TILDE", "#f9e2e2"] +, ["breveinverteddoublecmb", 865, "COMBINING DOUBLE INVERTED BREVE", "#f9e2e2"] +, ["uni0362", 866, "COMBINING DOUBLE RIGHTWARDS ARROW BELOW", "#f9e2e2"] +, ["uni0363", 867, "COMBINING LATIN SMALL LETTER A", "#f9e2e2"] +, ["uni0364", 868, "COMBINING LATIN SMALL LETTER E", "#f9e2e2"] +, ["uni0365", 869, "COMBINING LATIN SMALL LETTER I", "#f9e2e2"] +, ["uni0366", 870, "COMBINING LATIN SMALL LETTER O", "#f9e2e2"] +, ["uni0367", 871, "COMBINING LATIN SMALL LETTER U", "#f9e2e2"] +, ["uni0368", 872, "COMBINING LATIN SMALL LETTER C", "#f9e2e2"] +, ["uni0369", 873, "COMBINING LATIN SMALL LETTER D", "#f9e2e2"] +, ["tildemidnosp", 820, "COMBINING TILDE OVERLAY", "#f9e2e2"] +, ["uniA718", 42776, "MODIFIER LETTER DOT SLASH"] +, ["uniA719", 42777, "MODIFIER LETTER DOT HORIZONTAL BAR"] +, ["uni01F6", 502, "LATIN CAPITAL LETTER HWAIR"] +, ["uni01F7", 503, "LATIN CAPITAL LETTER WYNN"] +, ["uni02E6_uni02E7_uni02E7", null, null, "#dddddd"] +, ["uni02E6_uni02E7_uni02E6", null, null, "#dddddd"] +, ["uni02E6_uni02E7_uni02E9", null, null, "#dddddd"] +, ["uni02E6_uni02E7_uni02E8", null, null, "#dddddd"] +, ["uniA710", 42768, "MODIFIER LETTER LOW DOTTED LEFT-STEM TONE BAR", "#cceff2"] +, ["uniA711", 42769, "MODIFIER LETTER EXTRA-LOW DOTTED LEFT-STEM TONE BAR", "#cceff2"] +, ["uniA716", 42774, "MODIFIER LETTER EXTRA-LOW LEFT-STEM TONE BAR", "#cceff2"] +, ["uniA717", 42775, "MODIFIER LETTER DOT VERTICAL BAR"] +, ["uniA714", 42772, "MODIFIER LETTER MID LEFT-STEM TONE BAR", "#cceff2"] +, ["uniA715", 42773, "MODIFIER LETTER LOW LEFT-STEM TONE BAR", "#cceff2"] +, ["uniA712", 42770, "MODIFIER LETTER EXTRA-HIGH LEFT-STEM TONE BAR", "#cceff2"] +, ["uniA713", 42771, "MODIFIER LETTER HIGH LEFT-STEM TONE BAR", "#cceff2"] +, ["uni1DA3", 7587, "MODIFIER LETTER SMALL TURNED H"] +, ["uniA66B", 42603, "CYRILLIC SMALL LETTER BINOCULAR O"] +, ["uniA66C", 42604, 
"CYRILLIC CAPITAL LETTER DOUBLE MONOCULAR O"] +, ["glottalstopinv", 662, "LATIN LETTER INVERTED GLOTTAL STOP"] +, ["uniA66A", 42602, "CYRILLIC CAPITAL LETTER BINOCULAR O"] +, ["uniA66F", 42607, "COMBINING CYRILLIC VZMET", "#f9e2e2"] +, ["pertenthousand", 8241, "PER TEN THOUSAND SIGN", "#efefef"] +, ["uniA66E", 42606, "CYRILLIC LETTER MULTIOCULAR O"] +, ["acutelowmod", 719, "MODIFIER LETTER LOW ACUTE ACCENT"] +, ["gravelowmod", 718, "MODIFIER LETTER LOW GRAVE ACCENT"] +, ["fourthtonechinese", 715, "MODIFIER LETTER GRAVE ACCENT"] +, ["verticallinelowmod", 716, "MODIFIER LETTER LOW VERTICAL LINE"] +, ["secondtonechinese", 714, "MODIFIER LETTER ACUTE ACCENT"] +, ["Tonefive", 444, "LATIN CAPITAL LETTER TONE FIVE"] +, ["romantwelve", 8555, "ROMAN NUMERAL TWELVE", "#e2f4ea"] +, ["arrowheadupmod", 708, "MODIFIER LETTER UP ARROWHEAD", "#cceff2"] +, ["arrowheaddownmod", 709, "MODIFIER LETTER DOWN ARROWHEAD", "#cceff2"] +, ["arrowheadleftmod", 706, "MODIFIER LETTER LEFT ARROWHEAD", "#cceff2"] +, ["arrowheadrightmod", 707, "MODIFIER LETTER RIGHT ARROWHEAD", "#cceff2"] +, ["glottalstopmod", 704, "MODIFIER LETTER GLOTTAL STOP"] +, ["firsttonechinese", 713, "MODIFIER LETTER MACRON"] +, ["five.sup", null, null, "#dddddd"] +, ["uni1D8A.cn", null, null, "#c4f2c1"] +, ["uni021D", 541, "LATIN SMALL LETTER YOGH"] +, ["uni0268.ccmp", null, null, "#dddddd"] +, ["uni021C", 540, "LATIN CAPITAL LETTER YOGH"] +, ["uniA662", 42594, "CYRILLIC CAPITAL LETTER SOFT DE"] +, ["uniA663", 42595, "CYRILLIC SMALL LETTER SOFT DE"] +, ["uniA660", 42592, "CYRILLIC CAPITAL LETTER REVERSED TSE"] +, ["uniA661", 42593, "CYRILLIC SMALL LETTER REVERSED TSE"] +, ["uniA666", 42598, "CYRILLIC CAPITAL LETTER SOFT EM"] +, ["uni02E8_uni02E8_uni02E5", null, null, "#dddddd"] +, ["uni02E8_uni02E8_uni02E6", null, null, "#dddddd"] +, ["uni02E8_uni02E8_uni02E7", null, null, "#dddddd"] +, ["uni02E8_uni02E8_uni02E9", null, null, "#dddddd"] +, ["uniA668", 42600, "CYRILLIC CAPITAL LETTER MONOCULAR O"] +, ["numeralgreeksub", 
885, "GREEK LOWER NUMERAL SIGN", "#cceff2"] +, ["caronsubnosp", null, null, "#dddddd"] +, ["uni205E", 8286, "VERTICAL FOUR DOTS", "#efefef"] +, ["uni205A", 8282, "TWO DOT PUNCTUATION", "#efefef"] +, ["uni205B", 8283, "FOUR DOT MARK", "#efefef"] +, ["uni205C", 8284, "DOTTED CROSS", "#efefef"] +, ["schwahook", 602, "LATIN SMALL LETTER SCHWA WITH HOOK"] +, ["uni0458.ccmp", null, null, "#dddddd"] +, ["uni2E0D", 11789, "RIGHT RAISED OMISSION BRACKET", "#efefef"] +, ["uni2E0E", 11790, "EDITORIAL CORONIS", "#efefef"] +, ["uni1E9D", 7837, "LATIN SMALL LETTER LONG S WITH HIGH STROKE"] +, ["uni1E9F", 7839, "LATIN SMALL LETTER DELTA"] +, ["slongdotaccent", 7835, "LATIN SMALL LETTER LONG S WITH DOT ABOVE", ""] +, ["uni1E9C", 7836, "LATIN SMALL LETTER LONG S WITH DIAGONAL STROKE"] +, ["uniAB64", 43876, "LATIN SMALL LETTER INVERTED ALPHA"] +, ["uniAB65", 43877, "GREEK LETTER SMALL CAPITAL OMEGA"] +, ["Dtopbar", 395, "LATIN CAPITAL LETTER D WITH TOPBAR"] +, ["uni1EFE", 7934, "LATIN CAPITAL LETTER Y WITH LOOP"] +, ["uni1EFD", 7933, "LATIN SMALL LETTER MIDDLE-WELSH V"] +, ["uni1EFF", 7935, "LATIN SMALL LETTER Y WITH LOOP"] +, ["uni1EFA", 7930, "LATIN CAPITAL LETTER MIDDLE-WELSH LL"] +, ["uni1EFC", 7932, "LATIN CAPITAL LETTER MIDDLE-WELSH V"] +, ["uni1EFB", 7931, "LATIN SMALL LETTER MIDDLE-WELSH LL"] +, ["uni2058", 8280, "FOUR DOT PUNCTUATION", "#efefef"] +, ["uni2059", 8281, "FIVE DOT PUNCTUATION", "#efefef"] +, ["uni2054", 8276, "INVERTED UNDERTIE", "#efefef"] +, ["uni2055", 8277, "FLOWER PUNCTUATION MARK", "#efefef"] +, ["uni2056", 8278, "THREE DOT PUNCTUATION", "#efefef"] +, ["uni2057", 8279, "QUADRUPLE PRIME", "#efefef"] +, ["uni2050", 8272, "CLOSE UP", "#efefef"] +, ["uni2051", 8273, "TWO ASTERISKS ALIGNED VERTICALLY", "#efefef"] +, ["uni2052", 8274, "COMMERCIAL MINUS SIGN", "#cceff2"] +, ["uni2053", 8275, "SWUNG DASH", "#efefef"] +, ["uniA66D", 42605, "CYRILLIC SMALL LETTER DOUBLE MONOCULAR O"] +, ["ascript_uni02DE", null, null, "#dddddd"] +, ["longst", 64261, "LATIN SMALL 
LIGATURE LONG S T"] +, ["uni02E9_uni02E8_uni02E9", null, null, "#dddddd"] +, ["uni02E9_uni02E8_uni02E8", null, null, "#dddddd"] +, ["tesh", 679, "LATIN SMALL LETTER TESH DIGRAPH"] +, ["uni02E8_uni02E9_uni02E7", null, null, "#dddddd"] +, ["uni02E9_uni02E8_uni02E5", null, null, "#dddddd"] +, ["uni02E9_uni02E8_uni02E7", null, null, "#dddddd"] +, ["uni02E9_uni02E8_uni02E6", null, null, "#dddddd"] +, ["asterism", 8258, "ASTERISM", "#efefef"] +, ["philatin", 632, "LATIN SMALL LETTER PHI"] +, ["uniA75F", 42847, "LATIN SMALL LETTER V WITH DIAGONAL STROKE"] +, ["sixroman", 8565, "SMALL ROMAN NUMERAL SIX", "#e2f4ea"] +, ["Trthook", 430, "LATIN CAPITAL LETTER T WITH RETROFLEX HOOK"] +, ["st.cn", null, null, "#c4f2c1"] +, ["uni1ABE.w1", null, null, "#dddddd"] +, ["uni1ABE.w2", null, null, "#dddddd"] +, ["uni1ABE.w3", null, null, "#dddddd"] +, ["uni02E6_uni02E7_uni02E5", null, null, "#dddddd"] +, ["cyrillichookleft", null, null, "#dddddd"] +, ["uni27EC", 10220, "MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET", "#efefef"] +, ["uni03D8", 984, "GREEK LETTER ARCHAIC KOPPA"] +, ["uni03D9", 985, "GREEK SMALL LETTER ARCHAIC KOPPA"] +, ["Upsilonhooksymbol", 978, "GREEK UPSILON WITH HOOK SYMBOL"] +, ["uniA75D", 42845, "LATIN SMALL LETTER RUM ROTUNDA"] +, ["thetasymbolgreek", 977, "GREEK THETA SYMBOL"] +, ["thook", 429, "LATIN SMALL LETTER T WITH HOOK"] +, ["uni03D7", 983, "GREEK KAI SYMBOL"] +, ["uni0504", 1284, "CYRILLIC CAPITAL LETTER KOMI ZJE"] +, ["uni27EF", 10223, "MATHEMATICAL RIGHT FLATTENED PARENTHESIS", "#efefef"] +, ["finaltsadi", 693, "MODIFIER LETTER SMALL TURNED R WITH HOOK"] +, ["uni27EE", 10222, "MATHEMATICAL LEFT FLATTENED PARENTHESIS", "#efefef"] +, ["uni27ED", 10221, "MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET", "#efefef"] +, ["uniA769", 42857, "LATIN SMALL LETTER VEND"] +, ["glottalstop", 660, "LATIN LETTER GLOTTAL STOP"] +, ["uni1D8F.cn", null, null, "#c4f2c1"] +, ["uniA766", 42854, "LATIN CAPITAL LETTER THORN WITH STROKE THROUGH DESCENDER"] +, ["uniA763", 
42851, "LATIN SMALL LETTER VISIGOTHIC Z"] +, ["uni02E5_uni02E7", null, null, "#dddddd"] +, ["uni02E5_uni02E9", null, null, "#dddddd"] +, ["uni02E5_uni02E8", null, null, "#dddddd"] +, ["lbelt", 620, "LATIN SMALL LETTER L WITH BELT"] +, ["uni0359", 857, "COMBINING ASTERISK BELOW", "#f9e2e2"] +, ["uni0358", 856, "COMBINING DOT ABOVE RIGHT", "#f9e2e2"] +, ["epsilon1revclosed", 606, "LATIN SMALL LETTER CLOSED REVERSED OPEN E"] +, ["uni0355", 853, "COMBINING RIGHT ARROWHEAD BELOW", "#f9e2e2"] +, ["uni0354", 852, "COMBINING LEFT ARROWHEAD BELOW", "#f9e2e2"] +, ["uni0357", 855, "COMBINING RIGHT HALF RING ABOVE", "#f9e2e2"] +, ["uni0356", 854, "COMBINING RIGHT ARROWHEAD AND UP ARROWHEAD BELOW", "#f9e2e2"] +, ["uni0351", 849, "COMBINING LEFT HALF RING ABOVE", "#f9e2e2"] +, ["uni0350", 848, "COMBINING RIGHT ARROWHEAD ABOVE", "#f9e2e2"] +, ["uni0353", 851, "COMBINING X BELOW", "#f9e2e2"] +, ["uni0352", 850, "COMBINING FERMATA", "#f9e2e2"] +, ["glottalstopreversedmod", 705, "MODIFIER LETTER REVERSED GLOTTAL STOP"] +, ["uni02E5_uni02E6_uni02E6", null, null, "#dddddd"] +, ["cadauna", 8454, "CADA UNA", "#cceff2"] +, ["lmidtilde_uni1ABE", null, null, "#dddddd"] +, ["uni02E5_uni02E6_uni02E9", null, null, "#dddddd"] +, ["uni02E5_uni02E6_uni02E8", null, null, "#dddddd"] +, ["U.cn", null, null, "#c4f2c1"] +, ["uni035E", 862, "COMBINING DOUBLE MACRON", "#f9e2e2"] +, ["uni0220", 544, "LATIN CAPITAL LETTER N WITH LONG RIGHT LEG"] +, ["uni0223", 547, "LATIN SMALL LETTER OU"] +, ["uni035F", 863, "COMBINING DOUBLE MACRON BELOW", "#f9e2e2"] +, ["uni0225", 549, "LATIN SMALL LETTER Z WITH HOOK"] +, ["uni0224", 548, "LATIN CAPITAL LETTER Z WITH HOOK"] +, ["uni035C", 860, "COMBINING DOUBLE BREVE BELOW", "#f9e2e2"] +, ["uni035B", 859, "COMBINING ZIGZAG ABOVE", "#f9e2e2"] +, ["digamma", 989, "GREEK SMALL LETTER DIGAMMA"] +, ["koppa", 991, "GREEK SMALL LETTER KOPPA"] +, ["uni02E7_uni02E8_uni02E7", null, null, "#dddddd"] +, ["ringhalfrightcentered", 722, "MODIFIER LETTER CENTRED RIGHT HALF RING", 
"#cceff2"] +, ["colontriangularhalfmod", 721, "MODIFIER LETTER HALF TRIANGULAR COLON"] +, ["colontriangularmod", 720, "MODIFIER LETTER TRIANGULAR COLON"] +, ["minusmod", 727, "MODIFIER LETTER MINUS SIGN", "#cceff2"] +, ["plusmod", 726, "MODIFIER LETTER PLUS SIGN", "#cceff2"] +, ["downtackmod", 725, "MODIFIER LETTER DOWN TACK", "#cceff2"] +, ["linevertnosp", 781, "COMBINING VERTICAL LINE ABOVE", "#f9e2e2"] +, ["eshlooprev", 426, "LATIN LETTER REVERSED ESH LOOP"] +, ["uni02E7_uni02E8_uni02E9", null, null, "#dddddd"] +, ["uni02E7_uni02E8_uni02E8", null, null, "#dddddd"] +, ["uni1DBE", 7614, "MODIFIER LETTER SMALL EZH"] +, ["verticalbardbl", 8214, "DOUBLE VERTICAL LINE", "#efefef"] +, ["uniA699", 42649, "CYRILLIC SMALL LETTER DOUBLE O"] +, ["uniA698", 42648, "CYRILLIC CAPITAL LETTER DOUBLE O"] +, ["uniA697", 42647, "CYRILLIC SMALL LETTER SHWE"] +, ["uniA696", 42646, "CYRILLIC CAPITAL LETTER SHWE"] +, ["uniA695", 42645, "CYRILLIC SMALL LETTER HWE"] +, ["romaneleven", 8554, "ROMAN NUMERAL ELEVEN", "#e2f4ea"] +, ["uniA693", 42643, "CYRILLIC SMALL LETTER TCHE"] +, ["uniA692", 42642, "CYRILLIC CAPITAL LETTER TCHE"] +, ["uniA691", 42641, "CYRILLIC SMALL LETTER TSSE"] +, ["uniA690", 42640, "CYRILLIC CAPITAL LETTER TSSE"] +, ["Iota1", 406, "LATIN CAPITAL LETTER IOTA"] +, ["uniA69F", 42655, "COMBINING CYRILLIC LETTER IOTIFIED E", "#f9e2e2"] +, ["uniA69D", 42653, "MODIFIER LETTER CYRILLIC SOFT SIGN"] +, ["uniA69C", 42652, "MODIFIER LETTER CYRILLIC HARD SIGN"] +, ["uniA69B", 42651, "CYRILLIC SMALL LETTER CROSSED O"] +, ["uniA69A", 42650, "CYRILLIC CAPITAL LETTER CROSSED O"] +, ["uniA667", 42599, "CYRILLIC SMALL LETTER SOFT EM"] +, ["uniA664", 42596, "CYRILLIC CAPITAL LETTER SOFT EL"] +, ["uniA73A", 42810, "LATIN CAPITAL LETTER AV WITH HORIZONTAL BAR"] +, ["uniA73B", 42811, "LATIN SMALL LETTER AV WITH HORIZONTAL BAR"] +, ["lsquare", 8467, "SCRIPT SMALL L"] +, ["uniA73C", 42812, "LATIN CAPITAL LETTER AY"] +, ["carondotcomb", null, null, "#dddddd"] +, ["rhotichookmod", 734, 
"MODIFIER LETTER RHOTIC HOOK", "#cceff2"] +, ["uniA669", 42601, "CYRILLIC SMALL LETTER MONOCULAR O"] +, ["uni1DD6", 7638, "COMBINING LATIN SMALL LETTER AV", "#f9e2e2"] +, ["uni1DD7", 7639, "COMBINING LATIN SMALL LETTER C CEDILLA", "#f9e2e2"] +, ["uniAB36.cn", null, null, "#c4f2c1"] +, ["uni051F", 1311, "CYRILLIC SMALL LETTER ALEUT KA"] +, ["oneeighth", 8539, "VULGAR FRACTION ONE EIGHTH", "#e2f4ea"] +, ["linevertsubnosp", 809, "COMBINING VERTICAL LINE BELOW", "#f9e2e2"] +, ["uniA7FE", 43006, "LATIN EPIGRAPHIC LETTER I LONGA"] +, ["uniA7FD", 43005, "LATIN EPIGRAPHIC LETTER INVERTED M"] +, ["uniA7FC", 43004, "LATIN EPIGRAPHIC LETTER REVERSED P"] +, ["uniA7FB", 43003, "LATIN EPIGRAPHIC LETTER REVERSED F"] +, ["uniA7FA", 43002, "LATIN LETTER SMALL CAPITAL TURNED M"] +, ["uni0517", 1303, "CYRILLIC SMALL LETTER RHA"] +, ["uni0516", 1302, "CYRILLIC CAPITAL LETTER RHA"] +, ["uni0515", 1301, "CYRILLIC SMALL LETTER LHA"] +, ["uni0514", 1300, "CYRILLIC CAPITAL LETTER LHA"] +, ["uniA7F7", 42999, "LATIN EPIGRAPHIC LETTER SIDEWAYS I"] +, ["uni0519", 1305, "CYRILLIC SMALL LETTER YAE"] +, ["uni0518", 1304, "CYRILLIC CAPITAL LETTER YAE"] +, ["mcapturn", 412, "LATIN CAPITAL LETTER TURNED M"] +, ["uni2E02", 11778, "LEFT SUBSTITUTION BRACKET", "#efefef"] +, ["st", 64262, "LATIN SMALL LIGATURE ST"] +, ["uni2E00", 11776, "RIGHT ANGLE SUBSTITUTION MARKER", "#efefef"] +, ["zbar", 438, "LATIN SMALL LETTER Z WITH STROKE"] +, ["Rx", 8478, "PRESCRIPTION TAKE", "#cceff2"] +, ["uni205D", 8285, "TRICOLON", "#efefef"] +, ["Q.cn", null, null, "#c4f2c1"] +, ["uni02E7_uni02E5_uni02E6", null, null, "#dddddd"] +, ["uni02E7_uni02E5_uni02E7", null, null, "#dddddd"] +, ["uni02E7_uni02E5_uni02E5", null, null, "#dddddd"] +, ["uni02E7_uni02E5_uni02E8", null, null, "#dddddd"] +, ["uni02E7_uni02E5_uni02E9", null, null, "#dddddd"] +, ["uni1ABA_uni1ABD", null, null, "#dddddd"] +, ["uni02E5_uni02E9_uni02E7", null, null, "#dddddd"] +, ["uni02E6_uni02E5_uni02E9", null, null, "#dddddd"] +, 
["uni02E6_uni02E5_uni02E8", null, null, "#dddddd"] +, ["uni02E6_uni02E5_uni02E6", null, null, "#dddddd"] +, ["uniA73F.cn", null, null, "#c4f2c1"] +, ["spaceopenbox", 9251, "OPEN BOX", "#cceff2"] +, ["uni02E8_uni02E7_uni02E7", null, null, "#dddddd"] +, ["uni02E9_uni02E9_uni02E8", null, null, "#dddddd"] +, ["uni02E8_uni02E7_uni02E5", null, null, "#dddddd"] +, ["uni02E8_uni02E5_uni02E9", null, null, "#dddddd"] +, ["uni02E8_uni02E5_uni02E8", null, null, "#dddddd"] +, ["uni02E8_uni02E5_uni02E5", null, null, "#dddddd"] +, ["uni02E8_uni02E5_uni02E7", null, null, "#dddddd"] +, ["uni02E8_uni02E5_uni02E6", null, null, "#dddddd"] +, ["uni02E8_uni02E7_uni02E9", null, null, "#dddddd"] +, ["uni02E8_uni02E7_uni02E8", null, null, "#dddddd"] +, ["shindagesh", 737, "MODIFIER LETTER SMALL L"] +, ["uni034B", 843, "COMBINING HOMOTHETIC ABOVE", "#f9e2e2"] +, ["uni034C", 844, "COMBINING ALMOST EQUAL TO ABOVE", "#f9e2e2"] +, ["uni034A", 842, "COMBINING NOT TILDE ABOVE", "#f9e2e2"] +, ["uni034F", 847, "COMBINING GRAPHEME JOINER", "#f9e2e2"] +, ["uni034D", 845, "COMBINING LEFT RIGHT ARROW BELOW", "#f9e2e2"] +, ["uni034E", 846, "COMBINING UPWARDS ARROW BELOW", "#f9e2e2"] +, ["uni27E7", 10215, "MATHEMATICAL RIGHT WHITE SQUARE BRACKET", "#efefef"] +, ["uni27E6", 10214, "MATHEMATICAL LEFT WHITE SQUARE BRACKET", "#efefef"] +, ["ibar", 616, "LATIN SMALL LETTER I WITH STROKE"] +, ["uni0348", 840, "COMBINING DOUBLE VERTICAL LINE BELOW", "#f9e2e2"] +, ["uni0349", 841, "COMBINING LEFT ANGLE BELOW", "#f9e2e2"] +, ["reverseddblprime", 8246, "REVERSED DOUBLE PRIME", "#efefef"] +, ["perispomenigreekcmb", 834, "COMBINING GREEK PERISPOMENI", "#f9e2e2"] +, ["koroniscmb", 835, "COMBINING GREEK KORONIS", "#f9e2e2"] +, ["uni0347", 839, "COMBINING EQUALS SIGN BELOW", "#f9e2e2"] +, ["uni0236", 566, "LATIN SMALL LETTER T WITH CURL"] +, ["uni0234", 564, "LATIN SMALL LETTER L WITH CURL"] +, ["uni0235", 565, "LATIN SMALL LETTER N WITH CURL"] +, ["nlfthook", 626, "LATIN SMALL LETTER N WITH LEFT HOOK"] +, ["uniA68D", 
42637, "CYRILLIC SMALL LETTER TWE"] +, ["uniA68E", 42638, "CYRILLIC CAPITAL LETTER TSWE"] +, ["uniA68A", 42634, "CYRILLIC CAPITAL LETTER TE WITH MIDDLE HOOK"] +, ["uni0238", 568, "LATIN SMALL LETTER DB DIGRAPH"] +, ["uni0239", 569, "LATIN SMALL LETTER QP DIGRAPH"] +, ["tackdownsubnosp", 798, "COMBINING DOWN TACK BELOW", "#f9e2e2"] +, ["Fhook", 401, "LATIN CAPITAL LETTER F WITH HOOK"] +, ["uni02EA", 746, "MODIFIER LETTER YIN DEPARTING TONE MARK", "#cceff2"] +, ["uni02EB", 747, "MODIFIER LETTER YANG DEPARTING TONE MARK", "#cceff2"] +, ["uni2071.ccmp", null, null, "#dddddd"] +, ["uni02ED", 749, "MODIFIER LETTER UNASPIRATED", "#cceff2"] +, ["uni02EF", 751, "MODIFIER LETTER LOW DOWN ARROWHEAD", "#cceff2"] +, ["Khook", 408, "LATIN CAPITAL LETTER K WITH HOOK"] +, ["glottalstopreversedsuperior", 740, "MODIFIER LETTER SMALL REVERSED GLOTTAL STOP"] +, ["six.sup", null, null, "#dddddd"] +, ["tonebarhighmod", 742, "MODIFIER LETTER HIGH TONE BAR", "#cceff2"] +, ["tonebarmidmod", 743, "MODIFIER LETTER MID TONE BAR", "#cceff2"] +, ["tonebarlowmod", 744, "MODIFIER LETTER LOW TONE BAR", "#cceff2"] +, ["tonebarextralowmod", 745, "MODIFIER LETTER EXTRA-LOW TONE BAR", "#cceff2"] +, ["Fturn", 8498, "TURNED CAPITAL F"] +, ["uni023F", 575, "LATIN SMALL LETTER S WITH SWASH TAIL"] +, ["p.cn", null, null, "#c4f2c1"] +, ["uni023E", 574, "LATIN CAPITAL LETTER T WITH DIAGONAL STROKE"] +, ["uni023B", 571, "LATIN CAPITAL LETTER C WITH STROKE"] +, ["uni023C", 572, "LATIN SMALL LETTER C WITH STROKE"] +, ["uni023A", 570, "LATIN CAPITAL LETTER A WITH STROKE"] +, ["uniA684", 42628, "CYRILLIC CAPITAL LETTER ZHWE"] +, ["uniA685", 42629, "CYRILLIC SMALL LETTER ZHWE"] +, ["uniA686", 42630, "CYRILLIC CAPITAL LETTER CCHE"] +, ["uniA687", 42631, "CYRILLIC SMALL LETTER CCHE"] +, ["uniA680", 42624, "CYRILLIC CAPITAL LETTER DWE"] +, ["uniA681", 42625, "CYRILLIC SMALL LETTER DWE"] +, ["uniA682", 42626, "CYRILLIC CAPITAL LETTER DZWE"] +, ["uniA683", 42627, "CYRILLIC SMALL LETTER DZWE"] +, ["uniA738", 42808, 
"LATIN CAPITAL LETTER AV"] +, ["afii61664", 8204, "ZERO WIDTH NON-JOINER", "#efefef"] +, ["uni1DC6", 7622, "COMBINING MACRON-GRAVE", "#f9e2e2"] +, ["uni1DC4", 7620, "COMBINING MACRON-ACUTE", "#f9e2e2"] +, ["babygamma", 612, "LATIN SMALL LETTER RAMS HORN"] +, ["Vcursive", 434, "LATIN CAPITAL LETTER V WITH HOOK"] +, ["careof", 8453, "CARE OF", "#cceff2"] +, ["phi1", 981, "GREEK PHI SYMBOL"] +, ["uni02E5_uni02E5_uni02E8", null, null, "#dddddd"] +, ["uni02E5_uni02E5_uni02E9", null, null, "#dddddd"] +, ["uni1D92.cn", null, null, "#c4f2c1"] +, ["uni2C68", 11368, "LATIN SMALL LETTER H WITH DESCENDER"] +, ["phook.cn", null, null, "#c4f2c1"] +, ["breveacutecomb", null, null, "#dddddd"] +, ["uni2C69", 11369, "LATIN CAPITAL LETTER K WITH DESCENDER", ""] +, ["uni2C66", 11366, "LATIN SMALL LETTER T WITH DIAGONAL STROKE"] +, ["mhook", 625, "LATIN SMALL LETTER M WITH HOOK"] +, ["uni2C67", 11367, "LATIN CAPITAL LETTER H WITH DESCENDER"] +, ["uni0500", 1280, "CYRILLIC CAPITAL LETTER KOMI DE"] +, ["uni27EB", 10219, "MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET", "#efefef"] +, ["uni27EA", 10218, "MATHEMATICAL LEFT DOUBLE ANGLE BRACKET", "#efefef"] +, ["uni0503", 1283, "CYRILLIC SMALL LETTER KOMI DJE"] +, ["wynn", 447, "LATIN LETTER WYNN"] +, ["uni0505", 1285, "CYRILLIC SMALL LETTER KOMI ZJE"] +, ["uni0506", 1286, "CYRILLIC CAPITAL LETTER KOMI DZJE"] +, ["uni0507", 1287, "CYRILLIC SMALL LETTER KOMI DZJE"] +, ["uni0508", 1288, "CYRILLIC CAPITAL LETTER KOMI LJE"] +, ["uni0509", 1289, "CYRILLIC SMALL LETTER KOMI LJE"] +, ["rturn", 633, "LATIN SMALL LETTER TURNED R"] +, ["referencemark", 8251, "REFERENCE MARK", "#efefef"] +, ["Ghook", 403, "LATIN CAPITAL LETTER G WITH HOOK"] +, ["overscorenosp", 773, "COMBINING OVERLINE", "#f9e2e2"] +, ["breveinvsubnosp_uni1ABD", null, null, "#dddddd"] +, ["commaaboverightcomb", null, null, "#dddddd"] +, ["uni02E5_uni02E9_uni02E6", null, null, "#dddddd"] +, ["versicle", 8483, "VERSICLE", "#cceff2"] +, ["hyphen__", 8208, "HYPHEN", "#efefef"] +, 
["uni02E5_uni02E9_uni02E8", null, null, "#dddddd"] +, ["uni02E5_uni02E9_uni02E9", null, null, "#dddddd"] +, ["uni050A", 1290, "CYRILLIC CAPITAL LETTER KOMI NJE"] +, ["uni050B", 1291, "CYRILLIC SMALL LETTER KOMI NJE"] +, ["uni050C", 1292, "CYRILLIC CAPITAL LETTER KOMI SJE"] +, ["uni050D", 1293, "CYRILLIC SMALL LETTER KOMI SJE"] +, ["uni050E", 1294, "CYRILLIC CAPITAL LETTER KOMI TJE"] +, ["uni050F", 1295, "CYRILLIC SMALL LETTER KOMI TJE"] +, ["lrthook", 621, "LATIN SMALL LETTER L WITH RETROFLEX HOOK"] +, ["uni27E9", 10217, "MATHEMATICAL RIGHT ANGLE BRACKET", "#efefef"] +, ["uni27E8", 10216, "MATHEMATICAL LEFT ANGLE BRACKET", "#efefef"] +, ["diaeresistonosnosp", 836, "COMBINING GREEK DIALYTIKA TONOS", "#f9e2e2"] +, ["stigma", 987, "GREEK SMALL LETTER STIGMA"] +, ["uni051E", 1310, "CYRILLIC CAPITAL LETTER ALEUT KA"] +, ["afii57596", 8206, "LEFT-TO-RIGHT MARK", "#efefef"] +, ["afii57598", 8205, "ZERO WIDTH JOINER", "#efefef"] +, ["uni2B4E", 11086, "SHORT SLANTED NORTH ARROW", "#cceff2"] +, ["uni2B4F", 11087, "SHORT BACKSLANTED SOUTH ARROW", "#cceff2"] +, ["rturnascend", 634, "LATIN SMALL LETTER TURNED R WITH LONG LEG"] +, ["romanfivehundred", 8558, "ROMAN NUMERAL FIVE HUNDRED", "#e2f4ea"] +, ["uni0069.ccmp", null, null, "#dddddd"] +, ["uni2C6D", 11373, "LATIN CAPITAL LETTER ALPHA"] +, ["caret", 8248, "CARET", "#efefef"] +, ["uni024B", 587, "LATIN SMALL LETTER Q WITH HOOK TAIL"] +, ["uni024A", 586, "LATIN CAPITAL LETTER SMALL Q WITH HOOK TAIL"] +, ["uni024F", 591, "LATIN SMALL LETTER Y WITH STROKE"] +, ["uni024E", 590, "LATIN CAPITAL LETTER Y WITH STROKE"] +, ["uni024D", 589, "LATIN SMALL LETTER R WITH STROKE"] +, ["uni2C6B", 11371, "LATIN CAPITAL LETTER Z WITH DESCENDER", ""] +, ["uniA786", 42886, "LATIN CAPITAL LETTER INSULAR T"] +, ["caretinsertionpoint", 8257, "CARET INSERTION POINT", "#efefef"] +, ["bridgesubnosp", 810, "COMBINING BRIDGE BELOW", "#f9e2e2"] +, ["uni0243", 579, "LATIN CAPITAL LETTER B WITH STROKE"] +, ["uni0242", 578, "LATIN SMALL LETTER GLOTTAL 
STOP"] +, ["uni0241", 577, "LATIN CAPITAL LETTER GLOTTAL STOP"] +, ["uni0240", 576, "LATIN SMALL LETTER Z WITH SWASH TAIL"] +, ["uni0247", 583, "LATIN SMALL LETTER E WITH STROKE"] +, ["uni0246", 582, "LATIN CAPITAL LETTER E WITH STROKE"] +, ["uni0245", 581, "LATIN CAPITAL LETTER TURNED V"] +, ["uni0244", 580, "LATIN CAPITAL LETTER U BAR"] +, ["uni0249", 585, "LATIN SMALL LETTER J WITH STROKE"] +, ["uni0248", 584, "LATIN CAPITAL LETTER J WITH STROKE"] +, ["uniA739", 42809, "LATIN SMALL LETTER AV"] +, ["accountof", 8448, "ACCOUNT OF", "#cceff2"] +, ["uni02F9", 761, "MODIFIER LETTER BEGIN HIGH TONE", "#cceff2"] +, ["uni02F8", 760, "MODIFIER LETTER RAISED COLON", "#cceff2"] +, ["uni02F5", 757, "MODIFIER LETTER MIDDLE DOUBLE GRAVE ACCENT", "#cceff2"] +, ["uni02F4", 756, "MODIFIER LETTER MIDDLE GRAVE ACCENT", "#cceff2"] +, ["uni02F7", 759, "MODIFIER LETTER LOW TILDE", "#cceff2"] +, ["uni02F6", 758, "MODIFIER LETTER MIDDLE DOUBLE ACUTE ACCENT", "#cceff2"] +, ["uni02F1", 753, "MODIFIER LETTER LOW LEFT ARROWHEAD", "#cceff2"] +, ["uni02F0", 752, "MODIFIER LETTER LOW UP ARROWHEAD", "#cceff2"] +, ["uni02F3", 755, "MODIFIER LETTER LOW RING", "#cceff2"] +, ["uni02F2", 754, "MODIFIER LETTER LOW RIGHT ARROWHEAD", "#cceff2"] +, ["dotmacroncomb.lc", null, null, "#dddddd"] +, ["uni1D39", 7481, "MODIFIER LETTER CAPITAL M"] +, ["uni1D38", 7480, "MODIFIER LETTER CAPITAL L"] +, ["uni1D37", 7479, "MODIFIER LETTER CAPITAL K"] +, ["fiveeighths", 8541, "VULGAR FRACTION FIVE EIGHTHS", "#e2f4ea"] +, ["uni1D35", 7477, "MODIFIER LETTER CAPITAL I"] +, ["uni1D34", 7476, "MODIFIER LETTER CAPITAL H"] +, ["uni1D33", 7475, "MODIFIER LETTER CAPITAL G"] +, ["uni1D32", 7474, "MODIFIER LETTER CAPITAL REVERSED E"] +, ["uni1D31", 7473, "MODIFIER LETTER CAPITAL E"] +, ["uni1D30", 7472, "MODIFIER LETTER CAPITAL D"] +, ["zcurl", 657, "LATIN SMALL LETTER Z WITH CURL"] +, ["uni2C60", 11360, "LATIN CAPITAL LETTER L WITH DOUBLE BAR"] +, ["oopen.cn", null, null, "#c4f2c1"] +, ["uni1D3F", 7487, "MODIFIER LETTER 
CAPITAL R"] +, ["uni1D3E", 7486, "MODIFIER LETTER CAPITAL P"] +, ["uni1D3D", 7485, "MODIFIER LETTER CAPITAL OU"] +, ["uni1D3C", 7484, "MODIFIER LETTER CAPITAL O"] +, ["uni1D3B", 7483, "MODIFIER LETTER CAPITAL REVERSED N"] +, ["uni1D3A", 7482, "MODIFIER LETTER CAPITAL N"] +, ["uni02FE", 766, "MODIFIER LETTER OPEN SHELF", "#cceff2"] +, ["uni02FD", 765, "MODIFIER LETTER SHELF", "#cceff2"] +, ["uni02FF", 767, "MODIFIER LETTER LOW LEFT ARROW", "#cceff2"] +, ["uni02FA", 762, "MODIFIER LETTER END HIGH TONE", "#cceff2"] +, ["uni02FC", 764, "MODIFIER LETTER END LOW TONE", "#cceff2"] +, ["uni02FB", 763, "MODIFIER LETTER BEGIN LOW TONE", "#cceff2"] +, ["dhook.cn", null, null, "#c4f2c1"] +, ["uniAB35", 43829, "LATIN SMALL LETTER LENIS F"] +, ["uniAB34", 43828, "LATIN SMALL LETTER E WITH FLOURISH"] +, ["uniAB31", 43825, "LATIN SMALL LETTER A REVERSED-SCHWA"] +, ["uniAB30", 43824, "LATIN SMALL LETTER BARRED ALPHA"] +, ["uniAB33", 43827, "LATIN SMALL LETTER BARRED E"] +, ["uni2185.cn", null, null, "#c4f2c1"] +, ["angstrom", 8491, "ANGSTROM SIGN"] +, ["macrongravecomb.lc", null, null, "#dddddd"] +, ["macronsubnosp", 817, "COMBINING MACRON BELOW", "#f9e2e2"] +, ["uniAB3E", 43838, "LATIN SMALL LETTER BLACKLETTER O WITH STROKE"] +, ["uniAB3D", 43837, "LATIN SMALL LETTER BLACKLETTER O"] +, ["uniAB3F", 43839, "LATIN SMALL LETTER OPEN O WITH STROKE"] +, ["uniAB3A", 43834, "LATIN SMALL LETTER M WITH CROSSED-TAIL"] +, ["uniAB3C", 43836, "LATIN SMALL LETTER ENG WITH CROSSED-TAIL"] +, ["ringlefthalfsubnosp", 796, "COMBINING LEFT HALF RING BELOW", "#f9e2e2"] +, ["Lsmallcap", 671, "LATIN LETTER SMALL CAPITAL L"] +, ["uniA75E", 42846, "LATIN CAPITAL LETTER V WITH DIAGONAL STROKE"] +, ["uniA75B", 42843, "LATIN SMALL LETTER R ROTUNDA"] +, ["squaresubnosp", 827, "COMBINING SQUARE BELOW", "#f9e2e2"] +, ["uniA75C", 42844, "LATIN CAPITAL LETTER RUM ROTUNDA"] +, ["Schwa", 399, "LATIN CAPITAL LETTER SCHWA"] +, ["uni1D83", 7555, "LATIN SMALL LETTER G WITH PALATAL HOOK"] +, ["uni1D80", 7552, "LATIN 
SMALL LETTER B WITH PALATAL HOOK"] +, ["linevertsubnosp_uni1ABD", null, null, "#dddddd"] +, ["uni2B5B", 11099, "BACKSLANTED SOUTH ARROW WITH HOOKED TAIL", "#cceff2"] +, ["uni2B5C", 11100, "SLANTED NORTH ARROW WITH HORIZONTAL TAIL", "#cceff2"] +, ["uni2B5A", 11098, "SLANTED NORTH ARROW WITH HOOKED HEAD", "#cceff2"] +, ["uni2B5F", 11103, "SHORT BENT ARROW POINTING DOWNWARDS THEN NORTH EAST", "#cceff2"] +, ["uni2B5D", 11101, "BACKSLANTED SOUTH ARROW WITH HORIZONTAL TAIL", "#cceff2"] +, ["uni2B5E", 11102, "BENT ARROW POINTING DOWNWARDS THEN NORTH EAST", "#cceff2"] +, ["uni024C", 588, "LATIN CAPITAL LETTER R WITH STROKE"] +, ["uniA723", 42787, "LATIN SMALL LETTER EGYPTOLOGICAL ALEF"] +, ["three.sup", null, null, "#dddddd"] +, ["dcroat_uni1ABE", null, null, "#dddddd"] +, ["oi", 419, "LATIN SMALL LETTER OI"] +, ["uni1ABC.w2", null, null, "#dddddd"] +, ["uni1ABC.w3", null, null, "#dddddd"] +, ["uni1ABC.w1", null, null, "#dddddd"] +, ["uniA797.cn", null, null, "#c4f2c1"] +, ["Tonetwo", 423, "LATIN CAPITAL LETTER TONE TWO"] +, ["uni1DA4.ccmp", null, null, "#dddddd"] +, ["a.cn", null, null, "#c4f2c1"] +, ["uni02E6_uni02E5_uni02E5", null, null, "#dddddd"] +, ["henghook", 615, "LATIN SMALL LETTER HENG WITH HOOK"] +, ["two.sup", null, null, "#dddddd"] +, ["o_uni02DE", null, null, "#dddddd"] +, ["uni1D4C", 7500, "MODIFIER LETTER SMALL TURNED OPEN E"] +, ["finalmem", 685, "LATIN LETTER BIDENTAL PERCUSSIVE"] +, ["uni012F.ccmp", null, null, "#dddddd"] +, ["Gsmallcap", 610, "LATIN LETTER SMALL CAPITAL G"] +, ["Esh", 425, "LATIN CAPITAL LETTER ESH"] +, ["uni1D2D", 7469, "MODIFIER LETTER CAPITAL AE"] +, ["uni1D2E", 7470, "MODIFIER LETTER CAPITAL B"] +, ["uni1D2F", 7471, "MODIFIER LETTER CAPITAL BARRED B"] +, ["uni1D2A", 7466, "GREEK LETTER SMALL CAPITAL PSI"] +, ["uni1D2B", 7467, "CYRILLIC LETTER SMALL CAPITAL EL"] +, ["uni1D2C", 7468, "MODIFIER LETTER CAPITAL A"] +, ["zero.sup", null, null, "#dddddd"] +, ["acutedotcomb", null, null, "#dddddd"] +, ["ghook.cn", null, null, "#c4f2c1"] +, 
["uni02E7_uni02E6_uni02E5", null, null, "#dddddd"] +, ["uni02E7_uni02E6_uni02E7", null, null, "#dddddd"] +, ["uni02E7_uni02E6_uni02E9", null, null, "#dddddd"] +, ["uni02E7_uni02E6_uni02E8", null, null, "#dddddd"] +, ["dieresismacroncomb.lc", null, null, "#dddddd"] +, ["brevegravecomb", null, null, "#dddddd"] +, ["uni1D28", 7464, "GREEK LETTER SMALL CAPITAL PI"] +, ["uni1D29", 7465, "GREEK LETTER SMALL CAPITAL RHO"] +, ["uni1D24", 7460, "LATIN LETTER VOICED LARYNGEAL SPIRANT"] +, ["uni1D25", 7461, "LATIN LETTER AIN"] +, ["uni1D26", 7462, "GREEK LETTER SMALL CAPITAL GAMMA"] +, ["uni1D27", 7463, "GREEK LETTER SMALL CAPITAL LAMDA"] +, ["uni1D20", 7456, "LATIN LETTER SMALL CAPITAL V"] +, ["uni1D21", 7457, "LATIN LETTER SMALL CAPITAL W"] +, ["uni1D22", 7458, "LATIN LETTER SMALL CAPITAL Z"] +, ["uni1D23", 7459, "LATIN LETTER SMALL CAPITAL EZH"] +, ["circumflexacutecomb", null, null, "#dddddd"] +, ["Rsmallcapinv", 641, "LATIN LETTER SMALL CAPITAL INVERTED R"] +, ["uni02E5_uni02E7_uni02E9", null, null, "#dddddd"] +, ["uni02E5_uni02E7_uni02E6", null, null, "#dddddd"] +, ["uni02E5_uni02E7_uni02E7", null, null, "#dddddd"] +, ["uni02E5_uni02E7_uni02E5", null, null, "#dddddd"] +, ["zerowidthjoiner", 65279, "ZERO WIDTH NO-BREAK SPACE", "#efefef"] +, ["uni02E6_uni02E9_uni02E7", null, null, "#dddddd"] +, ["uni02E6_uni02E9_uni02E5", null, null, "#dddddd"] +, ["placeofinterestsign", 8984, "PLACE OF INTEREST SIGN", "#cceff2"] +, ["ts", 678, "LATIN SMALL LETTER TS DIGRAPH"] +, ["uni02E6_uni02E9_uni02E9", null, null, "#dddddd"] +, ["tonesix", 389, "LATIN SMALL LETTER TONE SIX"] +, ["uni2E37", 11831, "DAGGER WITH RIGHT GUARD", "#efefef"] +, ["uni2E36", 11830, "DAGGER WITH LEFT GUARD", "#efefef"] +, ["uni2E35", 11829, "TURNED SEMICOLON", "#efefef"] +, ["uni2E34", 11828, "RAISED COMMA", "#efefef"] +, ["uni2E33", 11827, "RAISED DOT", "#efefef"] +, ["uni2E32", 11826, "TURNED COMMA", "#efefef"] +, ["uni2E31", 11825, "WORD SEPARATOR MIDDLE DOT", "#efefef"] +, ["uni2E30", 11824, "RING POINT", 
"#efefef"] +, ["uni2E39", 11833, "TOP HALF SECTION SIGN", "#efefef"] +, ["uni2E38", 11832, "TURNED DAGGER", "#efefef"] +, ["twothirds", 8532, "VULGAR FRACTION TWO THIRDS", "#e2f4ea"] +, ["uni2E3E", 11838, "WIGGLY VERTICAL LINE", "#efefef"] +, ["uni2E3D", 11837, "VERTICAL SIX DOTS", "#efefef"] +, ["uni2E3C", 11836, "STENOGRAPHIC FULL STOP", "#efefef"] +, ["uni2E3B", 11835, "THREE-EM DASH", "#efefef"] +, ["uni2E3A", 11834, "TWO-EM DASH", "#efefef"] +, ["uni1F16B", 127339, "RAISED MD SIGN", "#cceff2"] +, ["pisymbolgreek", 982, "GREEK PI SYMBOL"] +, ["uni1F16A", 127338, "RAISED MC SIGN", "#cceff2"] +, ["dieresisgravecomb.lc", null, null, "#dddddd"] +, ["uniA755", 42837, "LATIN SMALL LETTER P WITH SQUIRREL TAIL"] +, ["tonefive", 445, "LATIN SMALL LETTER TONE FIVE"] +, ["uniA752", 42834, "LATIN CAPITAL LETTER P WITH FLOURISH"] +, ["uniA79D", 42909, "LATIN SMALL LETTER VOLAPUK OE"] +, ["uniA753", 42835, "LATIN SMALL LETTER P WITH FLOURISH"] +, ["tildedieresiscomb", null, null, "#dddddd"] +, ["uniA750", 42832, "LATIN CAPITAL LETTER P WITH STROKE THROUGH DESCENDER"] +, ["uni0528", 1320, "CYRILLIC CAPITAL LETTER EN WITH LEFT HOOK"] +, ["uni0529", 1321, "CYRILLIC SMALL LETTER EN WITH LEFT HOOK"] +, ["tel", 8481, "TELEPHONE SIGN", "#cceff2"] +, ["uni0522", 1314, "CYRILLIC CAPITAL LETTER EN WITH MIDDLE HOOK"] +, ["uni0523", 1315, "CYRILLIC SMALL LETTER EN WITH MIDDLE HOOK"] +, ["uni0520", 1312, "CYRILLIC CAPITAL LETTER EL WITH MIDDLE HOOK"] +, ["uni0521", 1313, "CYRILLIC SMALL LETTER EL WITH MIDDLE HOOK"] +, ["uniA70D", 42765, "MODIFIER LETTER EXTRA-HIGH DOTTED LEFT-STEM TONE BAR", "#cceff2"] +, ["commasuprevnosp", 788, "COMBINING REVERSED COMMA ABOVE", "#f9e2e2"] +, ["uni2183.cn", null, null, "#c4f2c1"] +, ["dotdblsubnosp_uni1ABD", null, null, "#dddddd"] +, ["uni1D97.cn", null, null, "#c4f2c1"] +, ["hyphennobreak", 8209, "NON-BREAKING HYPHEN", "#efefef"] +, ["archdblsubnosp", 811, "COMBINING INVERTED DOUBLE ARCH BELOW", "#f9e2e2"] +, ["e.cn", null, null, "#c4f2c1"] +, 
["hsuper", 688, "MODIFIER LETTER SMALL H"] +, ["deltaturn", 397, "LATIN SMALL LETTER TURNED DELTA"] +, ["published", null, null, "#dddddd"] +, ["uni052B", 1323, "CYRILLIC SMALL LETTER DZZHE"] +, ["uni052C", 1324, "CYRILLIC CAPITAL LETTER DCHE"] +, ["wsuper", 695, "MODIFIER LETTER SMALL W"] +, ["uni052A", 1322, "CYRILLIC CAPITAL LETTER DZZHE"] +, ["uni052D", 1325, "CYRILLIC SMALL LETTER DCHE"] +, ["uni037D.cn", null, null, "#c4f2c1"] +, ["macronacutecomb.lc", null, null, "#dddddd"] +, ["c.cn", null, null, "#c4f2c1"] +, ["Oopen.cn", null, null, "#c4f2c1"] +, ["oopen_uni02DE", null, null, "#dddddd"] +, ["four.frac", null, null, "#dddddd"] +, ["uni030C_uni1ABB", null, null, "#dddddd"] +, ["hhook", 614, "LATIN SMALL LETTER H WITH HOOK"] +, ["onedotleader", 8228, "ONE DOT LEADER", "#efefef"] +, ["uni02E8_uni02E9_uni02E5", null, null, "#dddddd"] +, ["uni02E8_uni02E9_uni02E6", null, null, "#dddddd"] +, ["uni02E8_uni02E9_uni02E9", null, null, "#dddddd"] +, ["uni02E8_uni02E9_uni02E8", null, null, "#dddddd"] +, ["plussubnosp", 799, "COMBINING PLUS SIGN BELOW", "#f9e2e2"] +, ["largerighthook", null, null, "#dddddd"] +, ["dtopbar", 396, "LATIN SMALL LETTER D WITH TOPBAR"] +, ["romanthree", 8546, "ROMAN NUMERAL THREE", "#e2f4ea"] +, ["uni1D36", 7478, "MODIFIER LETTER CAPITAL J"] +, ["uni1DCF", 7631, "COMBINING ZIGZAG BELOW", "#f9e2e2"] +, ["uni1DCE", 7630, "COMBINING OGONEK ABOVE", "#f9e2e2"] +, ["uni1DCD", 7629, "COMBINING DOUBLE CIRCUMFLEX ABOVE", "#f9e2e2"] +, ["uni1DCC", 7628, "COMBINING MACRON-BREVE", "#f9e2e2"] +, ["uni1DCB", 7627, "COMBINING BREVE-MACRON", "#f9e2e2"] +, ["uni1DCA", 7626, "COMBINING LATIN SMALL LETTER R BELOW", "#f9e2e2"] +, ["uniE001", 57345, "[private use E001]"] +, ["Nhook", 413, "LATIN CAPITAL LETTER N WITH LEFT HOOK"] +, ["tildemacroncomb.lc", null, null, "#dddddd"] +, ["iota1", 617, "LATIN SMALL LETTER IOTA"] +, ["uni02E6_uni02E8", null, null, "#dddddd"] +, ["uni02E6_uni02E9", null, null, "#dddddd"] +, ["uni1DC9", 7625, "COMBINING ACUTE-GRAVE-ACUTE", 
"#f9e2e2"] +, ["uni1DC8", 7624, "COMBINING GRAVE-ACUTE-GRAVE", "#f9e2e2"] +, ["uni1DC7", 7623, "COMBINING ACUTE-MACRON", "#f9e2e2"] +, ["uni02E6_uni02E5", null, null, "#dddddd"] +, ["uni1DC5", 7621, "COMBINING GRAVE-MACRON", "#f9e2e2"] +, ["uni02E6_uni02E7", null, null, "#dddddd"] +, ["uni1DC3", 7619, "COMBINING SUSPENSION MARK", "#f9e2e2"] +, ["uni1DC2", 7618, "COMBINING SNAKE BELOW", "#f9e2e2"] +, ["uni1DC1", 7617, "COMBINING DOTTED ACUTE ACCENT", "#f9e2e2"] +, ["uni1DC0", 7616, "COMBINING DOTTED GRAVE ACCENT", "#f9e2e2"] +, ["uniAB42", 43842, "LATIN SMALL LETTER TURNED OE WITH HORIZONTAL STROKE"] +, ["g.cn", null, null, "#c4f2c1"] +, ["uni2E2D", 11821, "FIVE DOT MARK", "#efefef"] +, ["uni2E2E", 11822, "REVERSED QUESTION MARK", "#efefef"] +, ["uni2E2F", 11823, "VERTICAL TILDE"] +, ["uni2E2A", 11818, "TWO DOTS OVER ONE DOT PUNCTUATION", "#efefef"] +, ["uni2E2B", 11819, "ONE DOT OVER TWO DOTS PUNCTUATION", "#efefef"] +, ["uni2E2C", 11820, "SQUARED FOUR DOT PUNCTUATION", "#efefef"] +, ["uni1D19", 7449, "LATIN LETTER SMALL CAPITAL REVERSED R"] +, ["uni1D18", 7448, "LATIN LETTER SMALL CAPITAL P"] +, ["uni02E9_uni02E7_uni02E5", null, null, "#dddddd"] +, ["uni1D11", 7441, "LATIN SMALL LETTER SIDEWAYS O"] +, ["uni1D10", 7440, "LATIN LETTER SMALL CAPITAL OPEN O"] +, ["uni1D13", 7443, "LATIN SMALL LETTER SIDEWAYS O WITH STROKE"] +, ["uni1D12", 7442, "LATIN SMALL LETTER SIDEWAYS OPEN O"] +, ["uni1D15", 7445, "LATIN LETTER SMALL CAPITAL OU"] +, ["uni1D14", 7444, "LATIN SMALL LETTER TURNED OE"] +, ["uni1D17", 7447, "LATIN SMALL LETTER BOTTOM HALF O"] +, ["uni1D16", 7446, "LATIN SMALL LETTER TOP HALF O"] +, ["uniFE28", 65064, "COMBINING LIGATURE RIGHT HALF BELOW", "#f9e2e2"] +, ["hooksubretronosp", 802, "COMBINING RETROFLEX HOOK BELOW", "#f9e2e2"] +, ["circumflexhookcomb", null, null, "#dddddd"] +, ["uniA785", 42885, "LATIN SMALL LETTER INSULAR S"] +, ["uniA784", 42884, "LATIN CAPITAL LETTER INSULAR S"] +, ["uniA787", 42887, "LATIN SMALL LETTER INSULAR T"] +, ["uniA781", 
42881, "LATIN SMALL LETTER TURNED L"] +, ["uniA780", 42880, "LATIN CAPITAL LETTER TURNED L"] +, ["uniA783", 42883, "LATIN SMALL LETTER INSULAR R"] +, ["uniA782", 42882, "LATIN CAPITAL LETTER INSULAR R"] +, ["qhook.cn", null, null, "#c4f2c1"] +, ["ghook", 608, "LATIN SMALL LETTER G WITH HOOK"] +, ["uniA789", 42889, "MODIFIER LETTER COLON", "#cceff2"] +, ["uniA788", 42888, "MODIFIER LETTER LOW CIRCUMFLEX ACCENT"] +, ["uniA78E", 42894, "LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT"] +, ["uniA78D", 42893, "LATIN CAPITAL LETTER TURNED H"] +, ["uniA78A", 42890, "MODIFIER LETTER SHORT EQUALS SIGN", "#cceff2"] +, ["uniA78C", 42892, "LATIN SMALL LETTER SALTILLO"] +, ["uniA78B", 42891, "LATIN CAPITAL LETTER SALTILLO"] +, ["Eturn", 398, "LATIN CAPITAL LETTER REVERSED E"] +, ["primerev1", 8245, "REVERSED PRIME", "#efefef"] +, ["uni1D1A", 7450, "LATIN LETTER SMALL CAPITAL TURNED R"] +, ["uni1D1C", 7452, "LATIN LETTER SMALL CAPITAL U"] +, ["uni1D1B", 7451, "LATIN LETTER SMALL CAPITAL T"] +, ["uni1D1E", 7454, "LATIN SMALL LETTER SIDEWAYS DIAERESIZED U"] +, ["uni1D1D", 7453, "LATIN SMALL LETTER SIDEWAYS U"] +, ["uni1D1F", 7455, "LATIN SMALL LETTER SIDEWAYS TURNED M"] +, ["uni2E24", 11812, "BOTTOM LEFT HALF BRACKET", "#efefef"] +, ["uni2E25", 11813, "BOTTOM RIGHT HALF BRACKET", "#efefef"] +, ["uni2E26", 11814, "LEFT SIDEWAYS U BRACKET", "#efefef"] +, ["uni2E27", 11815, "RIGHT SIDEWAYS U BRACKET", "#efefef"] +, ["uni2E20", 11808, "LEFT VERTICAL BAR WITH QUILL", "#efefef"] +, ["uni2E21", 11809, "RIGHT VERTICAL BAR WITH QUILL", "#efefef"] +, ["uni2E22", 11810, "TOP LEFT HALF BRACKET", "#efefef"] +, ["uni2E23", 11811, "TOP RIGHT HALF BRACKET", "#efefef"] +, ["uni2E28", 11816, "LEFT DOUBLE PARENTHESIS", "#efefef"] +, ["uni2E29", 11817, "RIGHT DOUBLE PARENTHESIS", "#efefef"] +, ["Eng", 330, "LATIN CAPITAL LETTER ENG"] +, ["macrondieresiscomb.lc", null, null, "#dddddd"] +, ["trthook", 648, "LATIN SMALL LETTER T WITH RETROFLEX HOOK"] +, ["uni02E5_uni02E6", null, null, "#dddddd"] +, 
["twodotleader", 8229, "TWO DOT LEADER", "#efefef"] +, ["Chook.cn", null, null, "#c4f2c1"] +, ["uni02E8_uni02E6", null, null, "#dddddd"] +, ["uni02E8_uni02E7", null, null, "#dddddd"] +, ["uni02E8_uni02E5", null, null, "#dddddd"] +, ["uni02E8_uni02E9", null, null, "#dddddd"] +, ["romannine", 8552, "ROMAN NUMERAL NINE", "#e2f4ea"] +, ["epsilonclosed", 666, "LATIN SMALL LETTER CLOSED OPEN E"] +, ["uni1FBD", 8125, "GREEK KORONIS", "#cceff2"] +, ["uniA759", 42841, "LATIN SMALL LETTER Q WITH DIAGONAL STROKE"] +, ["omegacyrillic", 1121, "CYRILLIC SMALL LETTER OMEGA"] +, ["Omegacyrillic", 1120, "CYRILLIC CAPITAL LETTER OMEGA"] +, ["yuslittlecyrillic", 1127, "CYRILLIC SMALL LETTER LITTLE YUS"] +, ["Yuslittlecyrillic", 1126, "CYRILLIC CAPITAL LETTER LITTLE YUS"] +, ["eiotifiedcyrillic", 1125, "CYRILLIC SMALL LETTER IOTIFIED E"] +, ["Eiotifiedcyrillic", 1124, "CYRILLIC CAPITAL LETTER IOTIFIED E"] +, ["yuslittleiotifiedcyrillic", 1129, "CYRILLIC SMALL LETTER IOTIFIED LITTLE YUS"] +, ["Yuslittleiotifiedcyrillic", 1128, "CYRILLIC CAPITAL LETTER IOTIFIED LITTLE YUS"] +, ["uni02E9_uni02E7_uni02E8", null, null, "#dddddd"] +, ["uni02E9_uni02E7_uni02E9", null, null, "#dddddd"] +, ["four.sup", null, null, "#dddddd"] +, ["jcrosstail", 669, "LATIN SMALL LETTER J WITH CROSSED-TAIL"] +, ["uniA7B1", 42929, "LATIN CAPITAL LETTER TURNED T"] +, ["uniA7B0", 42928, "LATIN CAPITAL LETTER TURNED K"] +, ["uniA77F", 42879, "LATIN SMALL LETTER TURNED INSULAR G"] +, ["uni2C78.cn", null, null, "#c4f2c1"] +, ["tildecomb_uni1ABC", null, null, "#dddddd"] +, ["tildecomb_uni1ABB", null, null, "#dddddd"] +, ["uniA77B", 42875, "LATIN CAPITAL LETTER INSULAR F"] +, ["hhooksuper", 689, "MODIFIER LETTER SMALL H WITH HOOK"] +, ["Yusbigiotifiedcyrillic", 1132, "CYRILLIC CAPITAL LETTER IOTIFIED BIG YUS"] +, ["yusbigcyrillic", 1131, "CYRILLIC SMALL LETTER BIG YUS"] +, ["Yusbigcyrillic", 1130, "CYRILLIC CAPITAL LETTER BIG YUS"] +, ["ksicyrillic", 1135, "CYRILLIC SMALL LETTER KSI"] +, ["Ksicyrillic", 1134, "CYRILLIC 
CAPITAL LETTER KSI"] +, ["yusbigiotifiedcyrillic", 1133, "CYRILLIC SMALL LETTER IOTIFIED BIG YUS"] +, ["brevetildecomb", null, null, "#dddddd"] +, ["tildeacutecomb.lc", null, null, "#dddddd"] +, ["nine.frac", null, null, "#dddddd"] +, ["uni0456.ccmp", null, null, "#dddddd"] +, ["romanthousand", 8559, "ROMAN NUMERAL ONE THOUSAND", "#e2f4ea"] +, ["macronsub", 717, "MODIFIER LETTER LOW MACRON"] +, ["ccurl", 597, "LATIN SMALL LETTER C WITH CURL"] +, ["bullseye", 664, "LATIN LETTER BILABIAL CLICK"] +, ["dasiaoxia", null, null, "#dddddd"] +, ["dieresiscaroncomb.lc", null, null, "#dddddd"] +, ["uni1ABE", 6846, "COMBINING PARENTHESES OVERLAY", "#f9e2e2"] +, ["uni1ABD", 6845, "COMBINING PARENTHESES BELOW", "#f9e2e2"] +, ["uniA79A", 42906, "LATIN CAPITAL LETTER VOLAPUK AE"] +, ["uni1ABA", 6842, "COMBINING STRONG CENTRALIZATION STROKE BELOW", "#f9e2e2"] +, ["uni1ABC", 6844, "COMBINING DOUBLE PARENTHESES ABOVE", "#f9e2e2"] +, ["uni1ABB", 6843, "COMBINING PARENTHESES ABOVE", "#f9e2e2"] +, ["ascriptturn", 594, "LATIN SMALL LETTER TURNED ALPHA"] +, ["uni1DB8", 7608, "MODIFIER LETTER SMALL CAPITAL U"] +, ["uni1DB9", 7609, "MODIFIER LETTER SMALL V WITH HOOK"] +, ["uni1DB4", 7604, "MODIFIER LETTER SMALL ESH"] +, ["uni1DB5", 7605, "MODIFIER LETTER SMALL T WITH PALATAL HOOK"] +, ["uni1DB6", 7606, "MODIFIER LETTER SMALL U BAR"] +, ["uni1DB7", 7607, "MODIFIER LETTER SMALL UPSILON"] +, ["uni1DB0", 7600, "MODIFIER LETTER SMALL CAPITAL N"] +, ["uni1DB1", 7601, "MODIFIER LETTER SMALL BARRED O"] +, ["uni1DB2", 7602, "MODIFIER LETTER SMALL PHI"] +, ["uni1DB3", 7603, "MODIFIER LETTER SMALL S WITH HOOK"] +, ["uni1D0F", 7439, "LATIN LETTER SMALL CAPITAL O"] +, ["uni1D0D", 7437, "LATIN LETTER SMALL CAPITAL M"] +, ["uni1D0E", 7438, "LATIN LETTER SMALL CAPITAL REVERSED N"] +, ["uni1D0B", 7435, "LATIN LETTER SMALL CAPITAL K"] +, ["uni1D0C", 7436, "LATIN LETTER SMALL CAPITAL L WITH STROKE"] +, ["uni1D0A", 7434, "LATIN LETTER SMALL CAPITAL J"] +, ["uni02E5_uni02E6_uni02E7", null, null, "#dddddd"] +, 
["uni1D06", 7430, "LATIN LETTER SMALL CAPITAL ETH"] +, ["uni1D07", 7431, "LATIN LETTER SMALL CAPITAL E"] +, ["uni1D04", 7428, "LATIN LETTER SMALL CAPITAL C"] +, ["uni1D05", 7429, "LATIN LETTER SMALL CAPITAL D"] +, ["uni1D02", 7426, "LATIN SMALL LETTER TURNED AE"] +, ["uni1D03", 7427, "LATIN LETTER SMALL CAPITAL BARRED B"] +, ["uni1D00", 7424, "LATIN LETTER SMALL CAPITAL A"] +, ["uni1D01", 7425, "LATIN LETTER SMALL CAPITAL AE"] +, ["uni1D08", 7432, "LATIN SMALL LETTER TURNED OPEN E"] +, ["uni1D09", 7433, "LATIN SMALL LETTER TURNED I"] +, ["jdotlessbar", 607, "LATIN SMALL LETTER DOTLESS J WITH STROKE"] +, ["uni1DBD", 7613, "MODIFIER LETTER SMALL Z WITH CURL"] +, ["Zbar", 437, "LATIN CAPITAL LETTER Z WITH STROKE"] +, ["uni1DBF", 7615, "MODIFIER LETTER SMALL THETA"] +, ["uni1DBA", 7610, "MODIFIER LETTER SMALL TURNED V"] +, ["uni1DBB", 7611, "MODIFIER LETTER SMALL Z"] +, ["uni1DBC", 7612, "MODIFIER LETTER SMALL Z WITH RETROFLEX HOOK"] +, ["uniA792", 42898, "LATIN CAPITAL LETTER C WITH BAR"] +, ["uni1AB4", 6836, "COMBINING TRIPLE DOT", "#f9e2e2"] +, ["uni1AB7", 6839, "COMBINING OPEN MARK BELOW", "#f9e2e2"] +, ["uni1AB6", 6838, "COMBINING WIGGLY LINE BELOW", "#f9e2e2"] +, ["uni1AB1", 6833, "COMBINING DIAERESIS-RING", "#f9e2e2"] +, ["uni1AB0", 6832, "COMBINING DOUBLED CIRCUMFLEX ACCENT", "#f9e2e2"] +, ["uni1AB3", 6835, "COMBINING DOWNWARDS ARROW", "#f9e2e2"] +, ["uni1AB2", 6834, "COMBINING INFINITY", "#f9e2e2"] +, ["uniA798", 42904, "LATIN CAPITAL LETTER F WITH STROKE"] +, ["uniA799", 42905, "LATIN SMALL LETTER F WITH STROKE"] +, ["uni1AB9", 6841, "COMBINING LIGHT CENTRALIZATION STROKE BELOW", "#f9e2e2"] +, ["uni1AB8", 6840, "COMBINING DOUBLE OPEN MARK BELOW", "#f9e2e2"] +, ["uni1D61", 7521, "MODIFIER LETTER SMALL CHI"] +, ["colon.pnum", 60929, "[private use EE01]"] +, ["glottalstopbarrev", 674, "LATIN LETTER REVERSED GLOTTAL STOP WITH STROKE"] +, ["uni1D66", 7526, "GREEK SUBSCRIPT SMALL LETTER BETA"] +, ["cyrillichook", null, null, "#dddddd"] +, ["uni2E11", 11793, 
"REVERSED FORKED PARAGRAPHOS", "#efefef"] +, ["uni2E10", 11792, "FORKED PARAGRAPHOS", "#efefef"] +, ["uni2E13", 11795, "DOTTED OBELOS", "#efefef"] +, ["uni2E12", 11794, "HYPODIASTOLE", "#efefef"] +, ["uni2E15", 11797, "UPWARDS ANCORA", "#efefef"] +, ["uni2E14", 11796, "DOWNWARDS ANCORA", "#efefef"] +, ["ezh", 658, "LATIN SMALL LETTER EZH"] +, ["ezhcurl", 659, "LATIN SMALL LETTER EZH WITH CURL"] +, ["uni2E19", 11801, "PALM BRANCH", "#efefef"] +, ["uni2E18", 11800, "INVERTED INTERROBANG", "#efefef"] +, ["srthook", 642, "LATIN SMALL LETTER S WITH HOOK"] +, ["uni2E1C", 11804, "LEFT LOW PARAPHRASE BRACKET", "#efefef"] +, ["uni2E1B", 11803, "TILDE WITH RING ABOVE", "#efefef"] +, ["uni2E1E", 11806, "TILDE WITH DOT ABOVE", "#efefef"] +, ["uni2E1D", 11805, "RIGHT LOW PARAPHRASE BRACKET", "#efefef"] +, ["uni2E1F", 11807, "TILDE WITH DOT BELOW", "#efefef"] +, ["Chook", 391, "LATIN CAPITAL LETTER C WITH HOOK"] +, ["uni1DE9", 7657, "COMBINING LATIN SMALL LETTER BETA", "#f9e2e2"] +, ["uniAB39", 43833, "LATIN SMALL LETTER L WITH MIDDLE RING"] +, ["uniAB38", 43832, "LATIN SMALL LETTER L WITH DOUBLE MIDDLE TILDE"] +, ["nine.sup", null, null, "#dddddd"] +, ["uniAB37", 43831, "LATIN SMALL LETTER L WITH INVERTED LAZY S"] +, ["uniAB36", 43830, "LATIN SMALL LETTER SCRIPT G WITH CROSSED-TAIL"] +, ["kturn", 670, "LATIN SMALL LETTER TURNED K"] +, ["numeralgreek", 884, "GREEK NUMERAL SIGN"] +, ["uniAB32", 43826, "LATIN SMALL LETTER BLACKLETTER E"] +, ["uni20AF", 8367, "DRACHMA SIGN", "#cceff2"] +, ["Ibar", 407, "LATIN CAPITAL LETTER I WITH STROKE"] +, ["Omegaroundcyrillic", 1146, "CYRILLIC CAPITAL LETTER ROUND OMEGA"] +, ["omegaroundcyrillic", 1147, "CYRILLIC SMALL LETTER ROUND OMEGA"] +, ["Omegatitlocyrillic", 1148, "CYRILLIC CAPITAL LETTER OMEGA WITH TITLO"] +, ["omegatitlocyrillic", 1149, "CYRILLIC SMALL LETTER OMEGA WITH TITLO"] +, ["Otcyrillic", 1150, "CYRILLIC CAPITAL LETTER OT"] +, ["otcyrillic", 1151, "CYRILLIC SMALL LETTER OT"] +, ["mturn", 623, "LATIN SMALL LETTER TURNED M"] +, 
["uni2C61", 11361, "LATIN SMALL LETTER L WITH DOUBLE BAR"] +, ["gcursive", 609, "LATIN SMALL LETTER SCRIPT G"] +, ["uniA756.cn", null, null, "#c4f2c1"] +, ["yturn", 654, "LATIN SMALL LETTER TURNED Y"] +, ["uni035D", 861, "COMBINING DOUBLE BREVE", "#f9e2e2"] +, ["uni02E5_uni02E8_uni02E5", null, null, "#dddddd"] +, ["uni02E5_uni02E8_uni02E7", null, null, "#dddddd"] +, ["uni02E5_uni02E8_uni02E6", null, null, "#dddddd"] +, ["d.cn", null, null, "#c4f2c1"] +, ["uni035A", 858, "COMBINING DOUBLE RING BELOW", "#f9e2e2"] +, ["uni02E5_uni02E8_uni02E9", null, null, "#dddddd"] +, ["uni02E5_uni02E8_uni02E8", null, null, "#dddddd"] +, ["uni02E6_uni02E6_uni02E5", null, null, "#dddddd"] +, ["tccurl", 680, "LATIN SMALL LETTER TC DIGRAPH WITH CURL"] +, ["quotedblreversed", 8223, "DOUBLE HIGH-REVERSED-9 QUOTATION MARK", "#efefef"] +, ["Rturnsuper", 694, "MODIFIER LETTER SMALL CAPITAL INVERTED R"] +, ["uni1FCE", 8142, "GREEK PSILI AND OXIA", "#cceff2"] +, ["uni1FCF", 8143, "GREEK PSILI AND PERISPOMENI", "#cceff2"] +, ["upsilonlatin", 650, "LATIN SMALL LETTER UPSILON"] +, ["uni1FC1", 8129, "GREEK DIALYTIKA AND PERISPOMENI", "#cceff2"] +, ["uni1FC0", 8128, "GREEK PERISPOMENI", "#cceff2"] +, ["uniA775", 42869, "LATIN SMALL LETTER RUM"] +, ["dieresisacutecomb", null, null, "#dddddd"] +, ["eng_uni1ABE", null, null, "#dddddd"] +, ["macroncomb_uni1ABB", null, null, "#dddddd"] +, ["ringhalfleftcentered", 723, "MODIFIER LETTER CENTRED LEFT HALF RING", "#cceff2"] +, ["uni02E7_uni02E8_uni02E6", null, null, "#dddddd"] +, ["uni02E7_uni02E8_uni02E5", null, null, "#dddddd"] +, ["omegaclosed", 631, "LATIN SMALL LETTER CLOSED OMEGA"] +, ["uni1DAA", 7594, "MODIFIER LETTER SMALL L WITH PALATAL HOOK"] +, ["uni1DAC", 7596, "MODIFIER LETTER SMALL M WITH HOOK"] +, ["uni1DAB", 7595, "MODIFIER LETTER SMALL CAPITAL L"] +, ["uni1DAE", 7598, "MODIFIER LETTER SMALL N WITH LEFT HOOK"] +, ["uni1DAD", 7597, "MODIFIER LETTER SMALL TURNED M WITH LONG LEG"] +, ["uni1DAF", 7599, "MODIFIER LETTER SMALL N WITH RETROFLEX 
HOOK"] +, ["uni1DE2", 7650, "COMBINING LATIN LETTER SMALL CAPITAL R", "#f9e2e2"] +, ["rturnrthook", 635, "LATIN SMALL LETTER TURNED R WITH HOOK"] +, ["uniAB3B", 43835, "LATIN SMALL LETTER N WITH CROSSED-TAIL"] +, ["gbar", 485, "LATIN SMALL LETTER G WITH STROKE"] +, ["uni1DA9", 7593, "MODIFIER LETTER SMALL L WITH RETROFLEX HOOK"] +, ["uni1DA8", 7592, "MODIFIER LETTER SMALL J WITH CROSSED-TAIL"] +, ["OI", 418, "LATIN CAPITAL LETTER OI"] +, ["uni1DA1", 7585, "MODIFIER LETTER SMALL DOTLESS J WITH STROKE"] +, ["uni1DA0", 7584, "MODIFIER LETTER SMALL F"] +, ["uni1DA2", 7586, "MODIFIER LETTER SMALL SCRIPT G"] +, ["uni1DA5", 7589, "MODIFIER LETTER SMALL IOTA"] +, ["uni1DA4", 7588, "MODIFIER LETTER SMALL I WITH STROKE"] +, ["uni1DA7", 7591, "MODIFIER LETTER SMALL CAPITAL I WITH STROKE"] +, ["uni1DA6", 7590, "MODIFIER LETTER SMALL CAPITAL I"] +, ["xsupnosp", 829, "COMBINING X ABOVE", "#f9e2e2"] +, ["uni1D73", 7539, "LATIN SMALL LETTER R WITH FISHHOOK AND MIDDLE TILDE"] +, ["uni1D72", 7538, "LATIN SMALL LETTER R WITH MIDDLE TILDE"] +, ["uni1D71", 7537, "LATIN SMALL LETTER P WITH MIDDLE TILDE"] +, ["uni1D70", 7536, "LATIN SMALL LETTER N WITH MIDDLE TILDE"] +, ["uni1D77", 7543, "LATIN SMALL LETTER TURNED G"] +, ["uni1D76", 7542, "LATIN SMALL LETTER Z WITH MIDDLE TILDE"] +, ["uni1D75", 7541, "LATIN SMALL LETTER T WITH MIDDLE TILDE"] +, ["uni1D74", 7540, "LATIN SMALL LETTER S WITH MIDDLE TILDE"] +, ["uni2E0F", 11791, "PARAGRAPHOS", "#efefef"] +, ["uni1D79", 7545, "LATIN SMALL LETTER INSULAR G"] +, ["uni1D78", 7544, "MODIFIER LETTER CYRILLIC EN"] +, ["uni2E0B", 11787, "RAISED SQUARE", "#efefef"] +, ["uni2E0C", 11788, "LEFT RAISED OMISSION BRACKET", "#efefef"] +, ["ascriptturn_uni02DE", null, null, "#dddddd"] +, ["uni2E0A", 11786, "RIGHT TRANSPOSITION BRACKET", "#efefef"] +, ["sampi", 993, "GREEK SMALL LETTER SAMPI"] +, ["uni02E9_uni02E7_uni02E6", null, null, "#dddddd"] +, ["epsilon1revclosed_uni02DE", null, null, "#dddddd"] +, ["o.cn", null, null, "#c4f2c1"] +, ["uniA768", 42856, 
"LATIN CAPITAL LETTER VEND"] +, ["uniA767", 42855, "LATIN SMALL LETTER THORN WITH STROKE THROUGH DESCENDER"] +, ["uniA765", 42853, "LATIN SMALL LETTER THORN WITH STROKE"] +, ["uniA764", 42852, "LATIN CAPITAL LETTER THORN WITH STROKE"] +, ["uni02E9_uni02E7_uni02E7", null, null, "#dddddd"] +, ["uniA762", 42850, "LATIN CAPITAL LETTER VISIGOTHIC Z"] +, ["uniA761", 42849, "LATIN SMALL LETTER VY"] +, ["uniA760", 42848, "LATIN CAPITAL LETTER VY"] +, ["uni02E5_uni02E9_uni02E5", null, null, "#dddddd"] +, ["uniA76F", 42863, "LATIN SMALL LETTER CON"] +, ["uniA76E", 42862, "LATIN CAPITAL LETTER CON"] +, ["uniA76D", 42861, "LATIN SMALL LETTER IS"] +, ["uniA76C", 42860, "LATIN CAPITAL LETTER IS"] +, ["uniA76B", 42859, "LATIN SMALL LETTER ET"] +, ["eshshortrev", 645, "LATIN SMALL LETTER SQUAT REVERSED ESH"] +, ["b.cn", null, null, "#c4f2c1"] +, ["uni1D7C", 7548, "LATIN SMALL LETTER IOTA WITH STROKE"] +, ["uni1D7B", 7547, "LATIN SMALL CAPITAL LETTER I WITH STROKE"] +, ["uni1D7A", 7546, "LATIN SMALL LETTER TH WITH STRIKETHROUGH"] +, ["ounce", 8485, "OUNCE SIGN", "#cceff2"] +, ["uni1D7F", 7551, "LATIN SMALL LETTER UPSILON WITH STROKE"] +, ["uni1D7E", 7550, "LATIN SMALL CAPITAL LETTER U WITH STROKE"] +, ["uni1D7D", 7549, "LATIN SMALL LETTER P WITH STROKE"] +, ["uni2E06", 11782, "RAISED INTERPOLATION MARKER", "#efefef"] +, ["uni2E07", 11783, "RAISED DOTTED INTERPOLATION MARKER", "#efefef"] +, ["uni2E04", 11780, "LEFT DOTTED SUBSTITUTION BRACKET", "#efefef"] +, ["uni2E05", 11781, "RIGHT DOTTED SUBSTITUTION BRACKET", "#efefef"] +, ["uni2E03", 11779, "RIGHT SUBSTITUTION BRACKET", "#efefef"] +, ["uni2E01", 11777, "RIGHT ANGLE DOTTED SUBSTITUTION MARKER", "#efefef"] +, ["phook", 421, "LATIN SMALL LETTER P WITH HOOK"] +, ["uniA694", 42644, "CYRILLIC CAPITAL LETTER HWE"] +, ["vscript", 651, "LATIN SMALL LETTER V WITH HOOK"] +, ["uniA688", 42632, "CYRILLIC CAPITAL LETTER DZZE"] +, ["acuterightnosp", 833, "COMBINING ACUTE TONE MARK", "#f9e2e2"] +, ["uniA68F", 42639, "CYRILLIC SMALL LETTER 
TSWE"] +, ["barmidlongnosp", 822, "COMBINING LONG STROKE OVERLAY", "#f9e2e2"] +, ["uni02E6_uni02E6_uni02E9", null, null, "#dddddd"] +, ["a_uni02DE", null, null, "#dddddd"] +, ["tildevertsupnosp", 830, "COMBINING VERTICAL TILDE", "#f9e2e2"] +, ["dotmacroncomb", null, null, "#dddddd"] +, ["linevertdblnosp", 782, "COMBINING DOUBLE VERTICAL LINE ABOVE", "#f9e2e2"] +, ["s.cn", null, null, "#c4f2c1"] +, ["uni03F3.ccmp", null, null, "#dddddd"] +, ["uni1D96.ccmp", null, null, "#dddddd"] +, ["macrondieresiscomb", null, null, "#dddddd"] +, ["jhookdblbar", 644, "LATIN SMALL LETTER DOTLESS J WITH STROKE AND HOOK"] +, ["six.frac", null, null, "#dddddd"] +, ["G.cn", null, null, "#c4f2c1"] +, ["seven.frac", null, null, "#dddddd"] +, ["C.cn", null, null, "#c4f2c1"] +, ["overline", 8254, "OVERLINE", "#efefef"] +, ["uniA79B", 42907, "LATIN SMALL LETTER VOLAPUK AE"] +, ["uniA79C", 42908, "LATIN CAPITAL LETTER VOLAPUK OE"] +, ["ascript", 593, "LATIN SMALL LETTER ALPHA"] +, ["O.cn", null, null, "#c4f2c1"] +, ["uniA79F", 42911, "LATIN SMALL LETTER VOLAPUK UE"] +, ["seven.sup", null, null, "#dddddd"] +, ["threeeighths", 8540, "VULGAR FRACTION THREE EIGHTHS", "#e2f4ea"] +, ["uniA73E.cn", null, null, "#c4f2c1"] +, ["uni03CF", 975, "GREEK CAPITAL KAI SYMBOL"] +, ["uniA79E", 42910, "LATIN CAPITAL LETTER VOLAPUK UE"] +, ["uni20DC", 8412, "COMBINING FOUR DOTS ABOVE", "#f9e2e2"] +, ["uni20DB", 8411, "COMBINING THREE DOTS ABOVE", "#f9e2e2"] +, ["romansix", 8549, "ROMAN NUMERAL SIX", "#e2f4ea"] +, ["uni1FBF", 8127, "GREEK PSILI", "#cceff2"] +, ["uni1FBE", 8126, "GREEK PROSGEGRAMMENI"] +, ["uniA77D", 42877, "LATIN CAPITAL LETTER INSULAR G"] +, ["uniA77E", 42878, "LATIN CAPITAL LETTER TURNED INSULAR G"] +, ["romaneight", 8551, "ROMAN NUMERAL EIGHT", "#e2f4ea"] +, ["uniA77A", 42874, "LATIN SMALL LETTER INSULAR D"] +, ["bhook", 595, "LATIN SMALL LETTER B WITH HOOK"] +, ["uniA77C", 42876, "LATIN SMALL LETTER INSULAR F"] +, ["Dhook", 394, "LATIN CAPITAL LETTER D WITH HOOK"] +, ["Dbar1", 393, "LATIN 
CAPITAL LETTER AFRICAN D"] +, ["uni1D6A", 7530, "GREEK SUBSCRIPT SMALL LETTER CHI"] +, ["zrthook", 656, "LATIN SMALL LETTER Z WITH RETROFLEX HOOK"] +, ["uni1D6C", 7532, "LATIN SMALL LETTER B WITH MIDDLE TILDE"] +, ["uni1D6D", 7533, "LATIN SMALL LETTER D WITH MIDDLE TILDE"] +, ["uni1D6E", 7534, "LATIN SMALL LETTER F WITH MIDDLE TILDE"] +, ["uni1D6F", 7535, "LATIN SMALL LETTER M WITH MIDDLE TILDE"] +, ["zero.frac", null, null, "#dddddd"] +, ["uni1D60", 7520, "MODIFIER LETTER SMALL GREEK PHI"] +, ["charactertie", 8256, "CHARACTER TIE", "#efefef"] +, ["uni1D62", 7522, "LATIN SUBSCRIPT SMALL LETTER I"] +, ["uni1D63", 7523, "LATIN SUBSCRIPT SMALL LETTER R"] +, ["uni1D64", 7524, "LATIN SUBSCRIPT SMALL LETTER U"] +, ["uni1D65", 7525, "LATIN SUBSCRIPT SMALL LETTER V"] +, ["uni1D67", 7527, "GREEK SUBSCRIPT SMALL LETTER GAMMA"] +, ["uni1D68", 7528, "GREEK SUBSCRIPT SMALL LETTER RHO"] +, ["uni1D69", 7529, "GREEK SUBSCRIPT SMALL LETTER PHI"] +, ["uniA778", 42872, "LATIN SMALL LETTER UM"] +, ["uniA779", 42873, "LATIN CAPITAL LETTER INSULAR D"] +, ["uniA774", 42868, "LATIN SMALL LETTER NUM"] +, ["Stigma", 986, "GREEK LETTER STIGMA"] +, ["uniA776", 42870, "LATIN LETTER SMALL CAPITAL RUM"] +, ["uniA777", 42871, "LATIN SMALL LETTER TUM"] +, ["uniA770", 42864, "MODIFIER LETTER US"] +, ["uniA771", 42865, "LATIN SMALL LETTER DUM"] +, ["uniA772", 42866, "LATIN SMALL LETTER LUM"] +, ["uniA773", 42867, "LATIN SMALL LETTER MUM"] +, ["epsilon1rev", 604, "LATIN SMALL LETTER REVERSED OPEN E"] +, ["uni1D62.ccmp", null, null, "#dddddd"] +, ["iotasubnosp_uni1ABD", null, null, "#dddddd"] +, ["Epsilon1", 400, "LATIN CAPITAL LETTER OPEN E"] +, ["uni1DA8.ccmp", null, null, "#dddddd"] +, ["Gamma1", 404, "LATIN CAPITAL LETTER GAMMA"] +, ["iotasubnosp", 837, "COMBINING GREEK YPOGEGRAMMENI", "#f9e2e2"] +, ["dieresisgravecomb", null, null, "#dddddd"] +, ["tildemacroncomb", null, null, "#dddddd"] +, ["uni0221", 545, "LATIN SMALL LETTER D WITH CURL"] +, ["gamma1", 611, "LATIN SMALL LETTER GAMMA"] +, 
["uniA7AB", 42923, "LATIN CAPITAL LETTER REVERSED OPEN E"] +, ["uniA7AC", 42924, "LATIN CAPITAL LETTER SCRIPT G"] +, ["uniA7AA", 42922, "LATIN CAPITAL LETTER H WITH HOOK"] +, ["bhook.cn", null, null, "#c4f2c1"] +, ["uniA7AD", 42925, "LATIN CAPITAL LETTER L WITH BELT"] +, ["pipedblbar", 450, "LATIN LETTER ALVEOLAR CLICK"] +, ["uni2E08", 11784, "DOTTED TRANSPOSITION MARKER", "#efefef"] +, ["YR", 422, "LATIN LETTER YR"] +, ["kgreenlandic", 312, "LATIN SMALL LETTER KRA"] +, ["uni2E09", 11785, "LEFT TRANSPOSITION BRACKET", "#efefef"] +, ["uni2182", 8578, "ROMAN NUMERAL TEN THOUSAND", "#e2f4ea"] +, ["uni2183", 8579, "ROMAN NUMERAL REVERSED ONE HUNDRED"] +, ["uni2180", 8576, "ROMAN NUMERAL ONE THOUSAND C D", "#e2f4ea"] +, ["uni2181", 8577, "ROMAN NUMERAL FIVE THOUSAND", "#e2f4ea"] +, ["uni2186", 8582, "ROMAN NUMERAL FIFTY EARLY FORM", "#e2f4ea"] +, ["uni2187", 8583, "ROMAN NUMERAL FIFTY THOUSAND", "#e2f4ea"] +, ["uni2184", 8580, "LATIN SMALL LETTER REVERSED C"] +, ["uni2185", 8581, "ROMAN NUMERAL SIX LATE FORM", "#e2f4ea"] +, ["uni2188", 8584, "ROMAN NUMERAL ONE HUNDRED THOUSAND", "#e2f4ea"] +, ["uni0222", 546, "LATIN CAPITAL LETTER OU"] +, ["romanseven", 8550, "ROMAN NUMERAL SEVEN", "#e2f4ea"] +, ["uniA7A8", 42920, "LATIN CAPITAL LETTER S WITH OBLIQUE STROKE"] +, ["uniA7A9", 42921, "LATIN SMALL LETTER S WITH OBLIQUE STROKE"] +, ["uniA7A2", 42914, "LATIN CAPITAL LETTER K WITH OBLIQUE STROKE"] +, ["uniA7A3", 42915, "LATIN SMALL LETTER K WITH OBLIQUE STROKE"] +, ["uniA7A0", 42912, "LATIN CAPITAL LETTER G WITH OBLIQUE STROKE"] +, ["uniA7A1", 42913, "LATIN SMALL LETTER G WITH OBLIQUE STROKE"] +, ["uniA7A6", 42918, "LATIN CAPITAL LETTER R WITH OBLIQUE STROKE"] +, ["btopbar", 387, "LATIN SMALL LETTER B WITH TOPBAR"] +, ["uniA7A4", 42916, "LATIN CAPITAL LETTER N WITH OBLIQUE STROKE"] +, ["uniA7A5", 42917, "LATIN SMALL LETTER N WITH OBLIQUE STROKE"] +, ["aturn", 592, "LATIN SMALL LETTER TURNED A"] +, ["uni2C7A", 11386, "LATIN SMALL LETTER O WITH LOW RING INSIDE"] +, ["uni2C7C", 
11388, "LATIN SUBSCRIPT SMALL LETTER J"] +, ["uni2C7B", 11387, "LATIN LETTER SMALL CAPITAL TURNED E"] +, ["uni2C7D", 11389, "MODIFIER LETTER CAPITAL V"] +, ["uni2C7F", 11391, "LATIN CAPITAL LETTER Z WITH SWASH TAIL"] +, ["rfishhookrev", 639, "LATIN SMALL LETTER REVERSED R WITH FISHHOOK"] +, ["uniA70B", 42763, "MODIFIER LETTER LOW DOTTED TONE BAR", "#cceff2"] +, ["romanhundred", 8557, "ROMAN NUMERAL ONE HUNDRED", "#e2f4ea"] +, ["uni1FEE", 8174, "GREEK DIALYTIKA AND OXIA", "#cceff2"] +, ["uni1FED", 8173, "GREEK DIALYTIKA AND VARIA", "#cceff2"] +, ["graveleftnosp", 832, "COMBINING GRAVE TONE MARK", "#f9e2e2"] +, ["uniA7FF", 43007, "LATIN EPIGRAPHIC LETTER ARCHAIC M"] +, ["uni2C71", 11377, "LATIN SMALL LETTER V WITH RIGHT HOOK"] +, ["uni2C70", 11376, "LATIN CAPITAL LETTER TURNED ALPHA"] +, ["uni2C73", 11379, "LATIN SMALL LETTER W WITH HOOK"] +, ["uni2189", 8585, "VULGAR FRACTION ZERO THIRDS", "#e2f4ea"] +, ["uni2C74", 11380, "LATIN SMALL LETTER V WITH CURL"] +, ["uni2C77", 11383, "LATIN SMALL LETTER TAILLESS PHI"] +, ["uni2C76", 11382, "LATIN SMALL LETTER HALF H"] +, ["uni2C79", 11385, "LATIN SMALL LETTER TURNED R WITH TAIL"] +, ["uni2C78", 11384, "LATIN SMALL LETTER E WITH NOTCH"] +, ["xsuper", 739, "MODIFIER LETTER SMALL X"] +, ["tackleftsubnosp", 792, "COMBINING LEFT TACK BELOW", "#f9e2e2"] +, ["eurocurrency", 8352, "EURO-CURRENCY SIGN", "#cceff2"] +, ["uni20E8", 8424, "COMBINING TRIPLE UNDERDOT", "#f9e2e2"] +, ["rho1", 1009, "GREEK RHO SYMBOL"] +, ["uni20E3", 8419, "COMBINING ENCLOSING KEYCAP", "#f9e2e2"] +, ["uni1AB5", 6837, "COMBINING X-X BELOW", "#f9e2e2"] +, ["uniA7F9", 43001, "MODIFIER LETTER SMALL LIGATURE OE"] +, ["uniA793", 42899, "LATIN SMALL LETTER C WITH BAR"] +, ["uniA7F8", 43000, "MODIFIER LETTER CAPITAL H WITH STROKE"] +, ["uniA790", 42896, "LATIN CAPITAL LETTER N WITH DESCENDER"] +, ["uniA791", 42897, "LATIN SMALL LETTER N WITH DESCENDER"] +, ["uniA796", 42902, "LATIN CAPITAL LETTER B WITH FLOURISH"] +, ["dblapostrophe", 750, "MODIFIER LETTER DOUBLE 
APOSTROPHE"] +, ["uniA797", 42903, "LATIN SMALL LETTER B WITH FLOURISH"] +, ["uniA794", 42900, "LATIN SMALL LETTER C WITH PALATAL HOOK"] +, ["uniA795", 42901, "LATIN SMALL LETTER H WITH PALATAL HOOK"] +, ["glottalstopbar", 673, "LATIN LETTER GLOTTAL STOP WITH STROKE"] +, ["dieresisnosp_uni1ABB", null, null, "#dddddd"] +, ["uni1D59", 7513, "MODIFIER LETTER SMALL SIDEWAYS U"] +, ["uni1D58", 7512, "MODIFIER LETTER SMALL U"] +, ["uni1D55", 7509, "MODIFIER LETTER SMALL BOTTOM HALF O"] +, ["uni1D54", 7508, "MODIFIER LETTER SMALL TOP HALF O"] +, ["uni1D57", 7511, "MODIFIER LETTER SMALL T"] +, ["uni1D56", 7510, "MODIFIER LETTER SMALL P"] +, ["uni1D51", 7505, "MODIFIER LETTER SMALL ENG"] +, ["uni1D50", 7504, "MODIFIER LETTER SMALL M"] +, ["uni1D53", 7507, "MODIFIER LETTER SMALL OPEN O"] +, ["uni1D52", 7506, "MODIFIER LETTER SMALL O"] +, ["Yhook", 435, "LATIN CAPITAL LETTER Y WITH HOOK"] +, ["uniA749", 42825, "LATIN SMALL LETTER L WITH HIGH STROKE"] +, ["uniA748", 42824, "LATIN CAPITAL LETTER L WITH HIGH STROKE"] +, ["minussubnosp", 800, "COMBINING MINUS SIGN BELOW", "#f9e2e2"] +, ["uniA741", 42817, "LATIN SMALL LETTER K WITH STROKE"] +, ["uniA740", 42816, "LATIN CAPITAL LETTER K WITH STROKE"] +, ["uniA743", 42819, "LATIN SMALL LETTER K WITH DIAGONAL STROKE"] +, ["uniA742", 42818, "LATIN CAPITAL LETTER K WITH DIAGONAL STROKE"] +, ["uniA745", 42821, "LATIN SMALL LETTER K WITH STROKE AND DIAGONAL STROKE"] +, ["uniA747", 42823, "LATIN SMALL LETTER BROKEN L"] +, ["uniA746", 42822, "LATIN CAPITAL LETTER BROKEN L"] +, ["uniA74A", 42826, "LATIN CAPITAL LETTER O WITH LONG STROKE OVERLAY"] +, ["uniA74C", 42828, "LATIN CAPITAL LETTER O WITH LOOP"] +, ["uniA74B", 42827, "LATIN SMALL LETTER O WITH LONG STROKE OVERLAY"] +, ["uniA74E", 42830, "LATIN CAPITAL LETTER OO"] +, ["uniA74D", 42829, "LATIN SMALL LETTER O WITH LOOP"] +, ["rturnsuper", 692, "MODIFIER LETTER SMALL TURNED R"] +, ["linevert", 712, "MODIFIER LETTER VERTICAL LINE"] +, ["uni02E5_uni02E6_uni02E5", null, null, "#dddddd"] +, 
["Phook", 420, "LATIN CAPITAL LETTER P WITH HOOK"] +, ["addresssubject", 8449, "ADDRESSED TO THE SUBJECT", "#cceff2"] +, ["eight.frac", null, null, "#dddddd"] +, ["primetriplerev1", 8247, "REVERSED TRIPLE PRIME", "#efefef"] +, ["uni1D5E", 7518, "MODIFIER LETTER SMALL GREEK GAMMA"] +, ["uni1D5D", 7517, "MODIFIER LETTER SMALL BETA"] +, ["uni1D5F", 7519, "MODIFIER LETTER SMALL DELTA"] +, ["uni1D5A", 7514, "MODIFIER LETTER SMALL TURNED M"] +, ["uni1D5C", 7516, "MODIFIER LETTER SMALL AIN"] +, ["uni1D5B", 7515, "MODIFIER LETTER SMALL V"] +, ["uni217F", 8575, "SMALL ROMAN NUMERAL ONE THOUSAND", "#e2f4ea"] +, ["uni217E", 8574, "SMALL ROMAN NUMERAL FIVE HUNDRED", "#e2f4ea"] +, ["uni217D", 8573, "SMALL ROMAN NUMERAL ONE HUNDRED", "#e2f4ea"] +, ["uni217C", 8572, "SMALL ROMAN NUMERAL FIFTY", "#e2f4ea"] +, ["twelveroman", 8571, "SMALL ROMAN NUMERAL TWELVE", "#e2f4ea"] +, ["elevenroman", 8570, "SMALL ROMAN NUMERAL ELEVEN", "#e2f4ea"] +, ["lambdabar", 411, "LATIN SMALL LETTER LAMBDA WITH STROKE"] +, ["uniA671", 42609, "COMBINING CYRILLIC HUNDRED MILLIONS SIGN", "#f9e2e2"] +, ["uniA767.cn", null, null, "#c4f2c1"] +, ["rfishhook", 638, "LATIN SMALL LETTER R WITH FISHHOOK"] +, ["uniA7A7", 42919, "LATIN SMALL LETTER R WITH OBLIQUE STROKE"] +, ["pipe", 448, "LATIN LETTER DENTAL CLICK"] +, ["tenroman", 8569, "SMALL ROMAN NUMERAL TEN", "#e2f4ea"] +, ["nineroman", 8568, "SMALL ROMAN NUMERAL NINE", "#e2f4ea"] +, ["eightroman", 8567, "SMALL ROMAN NUMERAL EIGHT", "#e2f4ea"] +, ["sevenroman", 8566, "SMALL ROMAN NUMERAL SEVEN", "#e2f4ea"] +, ["dzcurl", 677, "LATIN SMALL LETTER DZ DIGRAPH WITH CURL"] +, ["fiveroman", 8564, "SMALL ROMAN NUMERAL FIVE", "#e2f4ea"] +, ["fourroman", 8563, "SMALL ROMAN NUMERAL FOUR", "#e2f4ea"] +, ["threeroman", 8562, "SMALL ROMAN NUMERAL THREE", "#e2f4ea"] +, ["tworoman", 8561, "SMALL ROMAN NUMERAL TWO", "#e2f4ea"] +, ["oneroman", 8560, "SMALL ROMAN NUMERAL ONE", "#e2f4ea"] +, ["uniA654", 42580, "CYRILLIC CAPITAL LETTER REVERSED YU"] +, ["circumflextildecomb", 
null, null, "#dddddd"] +, ["underscoredblnosp", 819, "COMBINING DOUBLE LOW LINE", "#f9e2e2"] +, ["ysuper", 696, "MODIFIER LETTER SMALL Y"] +, ["tackrightsubnosp", 793, "COMBINING RIGHT TACK BELOW", "#f9e2e2"] +, ["uni2E17", 11799, "DOUBLE OBLIQUE HYPHEN", "#efefef"] +, ["uni2E16", 11798, "DOTTED RIGHT-POINTING ANGLE", "#efefef"] +, ["uniA665", 42597, "CYRILLIC SMALL LETTER SOFT EL"] +, ["D.cn", null, null, "#c4f2c1"] +, ["rsuper", 691, "MODIFIER LETTER SMALL R"] +, ["Sampi", 992, "GREEK LETTER SAMPI"] +, ["uni2C7E", 11390, "LATIN CAPITAL LETTER S WITH SWASH TAIL"] +, ["uni2E1A", 11802, "HYPHEN WITH DIAERESIS", "#efefef"] +, ["schwa", 601, "LATIN SMALL LETTER SCHWA"] +, ["gbar_uni1ABE", null, null, "#dddddd"] +, ["uni1DDA", 7642, "COMBINING LATIN SMALL LETTER G", "#f9e2e2"] +, ["koppacyrillic", 1153, "CYRILLIC SMALL LETTER KOPPA"] +, ["Koppacyrillic", 1152, "CYRILLIC CAPITAL LETTER KOPPA"] +, ["titlocyrilliccmb", 1155, "COMBINING CYRILLIC TITLO", "#f9e2e2"] +, ["thousandcyrillic", 1154, "CYRILLIC THOUSANDS SIGN", "#cceff2"] +, ["dasiapneumatacyrilliccmb", 1157, "COMBINING CYRILLIC DASIA PNEUMATA", "#f9e2e2"] +, ["palatalizationcyrilliccmb", 1156, "COMBINING CYRILLIC PALATALIZATION", "#f9e2e2"] +, ["uni0487", 1159, "COMBINING CYRILLIC POKRYTIE", "#f9e2e2"] +, ["psilipneumatacyrilliccmb", 1158, "COMBINING CYRILLIC PSILI PNEUMATA", "#f9e2e2"] +, ["uni0489", 1161, "COMBINING CYRILLIC MILLIONS SIGN", "#f9e2e2"] +, ["uni0488", 1160, "COMBINING CYRILLIC HUNDRED THOUSANDS SIGN", "#f9e2e2"] +, ["uni2C64", 11364, "LATIN CAPITAL LETTER R WITH TAIL"] +, ["uni2C65", 11365, "LATIN SMALL LETTER A WITH STROKE"] +, ["uni2C62", 11362, "LATIN CAPITAL LETTER L WITH MIDDLE TILDE"] +, ["uni2C63", 11363, "LATIN CAPITAL LETTER P WITH STROKE"] +, ["uni02E5_uni02E5_uni02E6", null, null, "#dddddd"] +, ["uni02E5_uni02E5_uni02E7", null, null, "#dddddd"] +, ["bbar", 384, "LATIN SMALL LETTER B WITH STROKE"] +, ["dhook", 599, "LATIN SMALL LETTER D WITH HOOK"] +, ["uni20F0", 8432, "COMBINING 
ASTERISK ABOVE", "#f9e2e2"] +, ["eshcurl", 646, "LATIN SMALL LETTER ESH WITH CURL"] +, ["uni2C6F", 11375, "LATIN CAPITAL LETTER TURNED A"] +, ["uni2C6E", 11374, "LATIN CAPITAL LETTER M WITH HOOK"] +, ["uni2C6C", 11372, "LATIN SMALL LETTER Z WITH DESCENDER", ""] +, ["uniA657.cn", null, null, "#c4f2c1"] +, ["uni2C6A", 11370, "LATIN SMALL LETTER K WITH DESCENDER", ""] +, ["quotereversed", 8219, "SINGLE HIGH-REVERSED-9 QUOTATION MARK", "#efefef"] +, ["romanfive", 8548, "ROMAN NUMERAL FIVE", "#e2f4ea"] +, ["tonetwo", 424, "LATIN SMALL LETTER TONE TWO"] +, ["rdescend", 636, "LATIN SMALL LETTER R WITH LONG LEG"] +, ["uni1FDD", 8157, "GREEK DASIA AND VARIA", "#cceff2"] +, ["uni1FDE", 8158, "GREEK DASIA AND OXIA", "#cceff2"] +, ["uni1FDF", 8159, "GREEK DASIA AND PERISPOMENI", "#cceff2"] +, ["slashshortnosp", 823, "COMBINING SHORT SOLIDUS OVERLAY", "#f9e2e2"] +, ["twostroke", 443, "LATIN LETTER TWO WITH STROKE"] +, ["ezhtail", 442, "LATIN SMALL LETTER EZH WITH TAIL"] +, ["uni1DF0", 7664, "COMBINING LATIN SMALL LETTER U WITH LIGHT CENTRALIZATION STROKE", "#f9e2e2"] +, ["uni1DF1", 7665, "COMBINING LATIN SMALL LETTER W", "#f9e2e2"] +, ["uni1DF2", 7666, "COMBINING LATIN SMALL LETTER A WITH DIAERESIS", "#f9e2e2"] +, ["uni1DF3", 7667, "COMBINING LATIN SMALL LETTER O WITH DIAERESIS", "#f9e2e2"] +, ["uni1DF4", 7668, "COMBINING LATIN SMALL LETTER U WITH DIAERESIS", "#f9e2e2"] +, ["uni1DF5", 7669, "COMBINING UP TACK ABOVE", "#f9e2e2"] +, ["uniA75A", 42842, "LATIN CAPITAL LETTER R ROTUNDA"] +, ["caronbelowcmb", 812, "COMBINING CARON BELOW", "#f9e2e2"] +, ["acutesubnosp", 791, "COMBINING ACUTE ACCENT BELOW", "#f9e2e2"] +, ["romantwo", 8545, "ROMAN NUMERAL TWO", "#e2f4ea"] +, ["eight.sup", null, null, "#dddddd"] +, ["uni1D6B", 7531, "LATIN SMALL LETTER UE"] +, ["uni1D4B", 7499, "MODIFIER LETTER SMALL OPEN E"] +, ["uni1D4A", 7498, "MODIFIER LETTER SMALL SCHWA"] +, ["uni1D4F", 7503, "MODIFIER LETTER SMALL K"] +, ["uni1D4D", 7501, "MODIFIER LETTER SMALL G"] +, ["uni1D4E", 7502, "MODIFIER 
LETTER SMALL TURNED I"] +, ["uni1D48", 7496, "MODIFIER LETTER SMALL D"] +, ["uni1D49", 7497, "MODIFIER LETTER SMALL E"] +, ["uni1D42", 7490, "MODIFIER LETTER CAPITAL W"] +, ["uni1D43", 7491, "MODIFIER LETTER SMALL A"] +, ["uni1D40", 7488, "MODIFIER LETTER CAPITAL T"] +, ["uni1D41", 7489, "MODIFIER LETTER CAPITAL U"] +, ["uni1D46", 7494, "MODIFIER LETTER SMALL TURNED AE"] +, ["uni1D47", 7495, "MODIFIER LETTER SMALL B"] +, ["uni1D44", 7492, "MODIFIER LETTER SMALL TURNED A"] +, ["uni1D45", 7493, "MODIFIER LETTER SMALL ALPHA"] +, ["finalkaf", 682, "LATIN SMALL LETTER LS DIGRAPH"] +, ["uniA756", 42838, "LATIN CAPITAL LETTER Q WITH STROKE THROUGH DESCENDER"] +, ["uniA757", 42839, "LATIN SMALL LETTER Q WITH STROKE THROUGH DESCENDER"] +, ["uniA754", 42836, "LATIN CAPITAL LETTER P WITH SQUIRREL TAIL"] +, ["uni1DFC", 7676, "COMBINING DOUBLE INVERTED BREVE BELOW", "#f9e2e2"] +, ["uni1DFD", 7677, "COMBINING ALMOST EQUAL TO BELOW", "#f9e2e2"] +, ["uni1DFE", 7678, "COMBINING LEFT ARROWHEAD ABOVE", "#f9e2e2"] +, ["uni1DFF", 7679, "COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW", "#f9e2e2"] +, ["uniA751", 42833, "LATIN SMALL LETTER P WITH STROKE THROUGH DESCENDER"] +, ["uniA758", 42840, "LATIN CAPITAL LETTER Q WITH DIAGONAL STROKE"] +, ["uni1FEF", 8175, "GREEK VARIA", "#cceff2"] +, ["dblprimemod", 698, "MODIFIER LETTER DOUBLE PRIME"] +, ["Ezhreversed", 440, "LATIN CAPITAL LETTER EZH REVERSED"] +, ["ezhreversed", 441, "LATIN SMALL LETTER EZH REVERSED"] +, ["Ezh", 439, "LATIN CAPITAL LETTER EZH"] +, ["romanfifty", 8556, "ROMAN NUMERAL FIFTY", "#e2f4ea"] +, ["uni02E8_uni02E6_uni02E5", null, null, "#dddddd"] +, ["ringhalfright", 702, "MODIFIER LETTER RIGHT HALF RING"] +, ["Ismallcap", 618, "LATIN LETTER SMALL CAPITAL I"] +, ["uptackmod", 724, "MODIFIER LETTER UP TACK", "#cceff2"] +, ["hyphendot", 8231, "HYPHENATION POINT", "#efefef"] +, ["uniFE29", 65065, "COMBINING TILDE LEFT HALF BELOW", "#f9e2e2"] +, ["epsilon1revhook", 605, "LATIN SMALL LETTER REVERSED OPEN E WITH HOOK"] +, 
["uniFE20", 65056, "COMBINING LIGATURE LEFT HALF", "#f9e2e2"] +, ["uniFE21", 65057, "COMBINING LIGATURE RIGHT HALF", "#f9e2e2"] +, ["uniFE22", 65058, "COMBINING DOUBLE TILDE LEFT HALF", "#f9e2e2"] +, ["uniFE23", 65059, "COMBINING DOUBLE TILDE RIGHT HALF", "#f9e2e2"] +, ["uniFE24", 65060, "COMBINING MACRON LEFT HALF", "#f9e2e2"] +, ["uniFE25", 65061, "COMBINING MACRON RIGHT HALF", "#f9e2e2"] +, ["uniFE26", 65062, "COMBINING CONJOINING MACRON", "#f9e2e2"] +, ["uniFE27", 65063, "COMBINING LIGATURE LEFT HALF BELOW", "#f9e2e2"] +, ["lyogh", 622, "LATIN SMALL LETTER LEZH"] +, ["uniFE2A", 65066, "COMBINING TILDE RIGHT HALF BELOW", "#f9e2e2"] +, ["uniFE2B", 65067, "COMBINING MACRON LEFT HALF BELOW", "#f9e2e2"] +, ["uniFE2C", 65068, "COMBINING MACRON RIGHT HALF BELOW", "#f9e2e2"] +, ["uniFE2D", 65069, "COMBINING CONJOINING MACRON BELOW", "#f9e2e2"] +, ["finalnun", 687, "LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL"] +, ["brevehookcomb", null, null, "#dddddd"] +, ["uniA76A", 42858, "LATIN CAPITAL LETTER ET"] +, ["gammasuper", 736, "MODIFIER LETTER SMALL GAMMA"] +, ["dyogh", 676, "LATIN SMALL LETTER DEZH DIGRAPH"] +, ["chook", 392, "LATIN SMALL LETTER C WITH HOOK"] +, ["Nsmallcap", 628, "LATIN LETTER SMALL CAPITAL N"] +, ["uniA729", 42793, "LATIN SMALL LETTER TZ"] +, ["Oopen", 390, "LATIN CAPITAL LETTER OPEN O"] +, ["uni2098", 8344, "LATIN SUBSCRIPT SMALL LETTER M"] +, ["uni2099", 8345, "LATIN SUBSCRIPT SMALL LETTER N"] +, ["uni2090", 8336, "LATIN SUBSCRIPT SMALL LETTER A"] +, ["uni2091", 8337, "LATIN SUBSCRIPT SMALL LETTER E"] +, ["uni2092", 8338, "LATIN SUBSCRIPT SMALL LETTER O"] +, ["uni2093", 8339, "LATIN SUBSCRIPT SMALL LETTER X"] +, ["uni2094", 8340, "LATIN SUBSCRIPT SMALL LETTER SCHWA"] +, ["uni2095", 8341, "LATIN SUBSCRIPT SMALL LETTER H"] +, ["uni2096", 8342, "LATIN SUBSCRIPT SMALL LETTER K"] +, ["uni2097", 8343, "LATIN SUBSCRIPT SMALL LETTER L"] +, ["uni02B2.ccmp", null, null, "#dddddd"] +, ["bridgeinvsubnosp", 826, "COMBINING INVERTED BRIDGE BELOW", 
"#f9e2e2"] +, ["uni2C72", 11378, "LATIN CAPITAL LETTER W WITH HOOK"] +, ["uni2C75", 11381, "LATIN CAPITAL LETTER HALF H"] +, ["lbar", 410, "LATIN SMALL LETTER L WITH BAR"] +, ["tpalatalhook", 427, "LATIN SMALL LETTER T WITH PALATAL HOOK"] +, ["Bhook", 385, "LATIN CAPITAL LETTER B WITH HOOK"] +, ["uni209A", 8346, "LATIN SUBSCRIPT SMALL LETTER P"] +, ["uni209B", 8347, "LATIN SUBSCRIPT SMALL LETTER S"] +, ["uni209C", 8348, "LATIN SUBSCRIPT SMALL LETTER T"] +, ["uni02E6_uni02E6_uni02E7", null, null, "#dddddd"] +, ["uni02E6_uni02E6_uni02E8", null, null, "#dddddd"] +, ["uni02E6_uni02E5_uni02E7", null, null, "#dddddd"] +, ["commaaccentrotate", null, null, "#dddddd"] +, ["esh", 643, "LATIN SMALL LETTER ESH"] +, ["cyrillictic", null, null, "#dddddd"] +, ["uni02E8_uni02E7_uni02E6", null, null, "#dddddd"] +, ["uni1DEE", 7662, "COMBINING LATIN SMALL LETTER P", "#f9e2e2"] +, ["uni1DED", 7661, "COMBINING LATIN SMALL LETTER O WITH LIGHT CENTRALIZATION STROKE", "#f9e2e2"] +, ["uni1DEF", 7663, "COMBINING LATIN SMALL LETTER ESH", "#f9e2e2"] +, ["uni1DEA", 7658, "COMBINING LATIN SMALL LETTER SCHWA", "#f9e2e2"] +, ["uni1DEC", 7660, "COMBINING LATIN SMALL LETTER L WITH DOUBLE MIDDLE TILDE", "#f9e2e2"] +, ["uni1DEB", 7659, "COMBINING LATIN SMALL LETTER F", "#f9e2e2"] +, ["pipedbl", 449, "LATIN LETTER LATERAL CLICK"] +, ["uni1AB9_uni1ABD", null, null, "#dddddd"] +, ["yhook", 436, "LATIN SMALL LETTER Y WITH HOOK"] +, ["chook.cn", null, null, "#c4f2c1"] +, ["uni029D.ccmp", null, null, "#dddddd"] +, ["uni2C7C.ccmp", null, null, "#dddddd"] +, ["wturn", 653, "LATIN SMALL LETTER TURNED W"] +, ["hv", 405, "LATIN SMALL LETTER HV"] +, ["controlHT", 9, "[Control]", "#ff4c4c"] +, ["uni1DE8", 7656, "COMBINING LATIN SMALL LETTER B", "#f9e2e2"] +, ["uni1DE5", 7653, "COMBINING LATIN SMALL LETTER LONG S", "#f9e2e2"] +, ["uni1DE4", 7652, "COMBINING LATIN SMALL LETTER S", "#f9e2e2"] +, ["uni1DE7", 7655, "COMBINING LATIN SMALL LETTER ALPHA", "#f9e2e2"] +, ["uni1DE6", 7654, "COMBINING LATIN SMALL LETTER Z", 
"#f9e2e2"] +, ["uni1DE1", 7649, "COMBINING LATIN LETTER SMALL CAPITAL N", "#f9e2e2"] +, ["uni1DE0", 7648, "COMBINING LATIN SMALL LETTER N", "#f9e2e2"] +, ["uni1DE3", 7651, "COMBINING LATIN SMALL LETTER R ROTUNDA", "#f9e2e2"] +, ["controlSOT", 2, "[Control]", "#ff4c4c"] +, ["uni037B.cn", null, null, "#c4f2c1"] +, ["uniA92E", 43310, "KAYAH LI SIGN CWI", "#efefef"] +, ["bbar_uni1ABE", null, null, "#dddddd"] +, ["rrthook_uni1ABE", null, null, "#dddddd"] +, ["uniA722", 42786, "LATIN CAPITAL LETTER EGYPTOLOGICAL ALEF"] +, ["uniA721", 42785, "MODIFIER LETTER STRESS AND LOW TONE", "#cceff2"] +, ["uniA720", 42784, "MODIFIER LETTER STRESS AND HIGH TONE", "#cceff2"] +, ["uniA727", 42791, "LATIN SMALL LETTER HENG"] +, ["uniA726", 42790, "LATIN CAPITAL LETTER HENG"] +, ["uniA725", 42789, "LATIN SMALL LETTER EGYPTOLOGICAL AIN"] +, ["uniA724", 42788, "LATIN CAPITAL LETTER EGYPTOLOGICAL AIN"] +, ["commaabovecmb", 787, "COMBINING COMMA ABOVE", "#f9e2e2"] +, ["uniA728", 42792, "LATIN CAPITAL LETTER TZ"] +, ["commaaboverightcmb", 789, "COMBINING COMMA ABOVE RIGHT", "#f9e2e2"] +, ["uni0502", 1282, "CYRILLIC CAPITAL LETTER KOMI DJE"] +, ["hooksubpalatnosp", 801, "COMBINING PALATALIZED HOOK BELOW", "#f9e2e2"] +, ["uni1AB8_uni1ABD", null, null, "#dddddd"] +, ["uniA758.cn", null, null, "#c4f2c1"] +, ["ezhcaron", 495, "LATIN SMALL LETTER EZH WITH CARON"] +, ["Ezhcaron", 494, "LATIN CAPITAL LETTER EZH WITH CARON"] +, ["uniA72C", 42796, "LATIN CAPITAL LETTER CUATRILLO"] +, ["uniA72B", 42795, "LATIN SMALL LETTER TRESILLO"] +, ["uniA72A", 42794, "LATIN CAPITAL LETTER TRESILLO"] +, ["uniA72F", 42799, "LATIN SMALL LETTER CUATRILLO WITH COMMA"] +, ["uniA72E", 42798, "LATIN CAPITAL LETTER CUATRILLO WITH COMMA"] +, ["uniA72D", 42797, "LATIN SMALL LETTER CUATRILLO"] +, ["uni2E42", 11842, "DOUBLE LOW-REVERSED-9 QUOTATION MARK", "#efefef"] +, ["uni2E40", 11840, "DOUBLE HYPHEN", "#efefef"] +, ["uni2E41", 11841, "REVERSED COMMA", "#efefef"] +, ["tildeacutecomb", null, null, "#dddddd"] +, ["leftloop.cn", 
null, null, "#c4f2c1"] +, ["rightloop.cn", null, null, "#c4f2c1"] +, ["uniE000", 57344, "[Private_Use]", "#f7f2d3"] +, ["uniE004", 57348, "[private use E004]"] +, ["uniE002", 57346, "[private use E002]"] +, ["uniE003", 57347, "[private use E003]"] +, [".notdef", null, null, "#dddddd"] +, ["DZcaron", 453, "LATIN CAPITAL LETTER D WITH SMALL LETTER Z WITH CARON"] +, ["Eth", 208, "LATIN CAPITAL LETTER ETH", ""] +, ["hbar", 295, "LATIN SMALL LETTER H WITH STROKE", ""] +, ["Tbar", 358, "LATIN CAPITAL LETTER T WITH STROKE", ""] +, ["tbar", 359, "LATIN SMALL LETTER T WITH STROKE", ""] +, ["Agrave", 192, "LATIN CAPITAL LETTER A WITH GRAVE", ""] +, ["Aacute", 193, "LATIN CAPITAL LETTER A WITH ACUTE", ""] +, ["Acircumflex", 194, "LATIN CAPITAL LETTER A WITH CIRCUMFLEX", ""] +, ["Atilde", 195, "LATIN CAPITAL LETTER A WITH TILDE", ""] +, ["Adieresis", 196, "LATIN CAPITAL LETTER A WITH DIAERESIS", ""] +, ["Aring", 197, "LATIN CAPITAL LETTER A WITH RING ABOVE", ""] +, ["Aringacute", 506, "LATIN CAPITAL LETTER A WITH RING ABOVE AND ACUTE", ""] +, ["Ccedilla", 199, "LATIN CAPITAL LETTER C WITH CEDILLA", ""] +, ["Egrave", 200, "LATIN CAPITAL LETTER E WITH GRAVE", ""] +, ["Eacute", 201, "LATIN CAPITAL LETTER E WITH ACUTE", ""] +, ["Ecircumflex", 202, "LATIN CAPITAL LETTER E WITH CIRCUMFLEX", ""] +, ["Edieresis", 203, "LATIN CAPITAL LETTER E WITH DIAERESIS", ""] +, ["Igrave", 204, "LATIN CAPITAL LETTER I WITH GRAVE", ""] +, ["Iacute", 205, "LATIN CAPITAL LETTER I WITH ACUTE", ""] +, ["Icircumflex", 206, "LATIN CAPITAL LETTER I WITH CIRCUMFLEX", ""] +, ["Idieresis", 207, "LATIN CAPITAL LETTER I WITH DIAERESIS", ""] +, ["Ntilde", 209, "LATIN CAPITAL LETTER N WITH TILDE", ""] +, ["Ograve", 210, "LATIN CAPITAL LETTER O WITH GRAVE", ""] +, ["Oacute", 211, "LATIN CAPITAL LETTER O WITH ACUTE", ""] +, ["Ocircumflex", 212, "LATIN CAPITAL LETTER O WITH CIRCUMFLEX", ""] +, ["Otilde", 213, "LATIN CAPITAL LETTER O WITH TILDE", ""] +, ["Odieresis", 214, "LATIN CAPITAL LETTER O WITH DIAERESIS", ""] 
+, ["Ugrave", 217, "LATIN CAPITAL LETTER U WITH GRAVE", ""] +, ["Uacute", 218, "LATIN CAPITAL LETTER U WITH ACUTE", ""] +, ["Ucircumflex", 219, "LATIN CAPITAL LETTER U WITH CIRCUMFLEX", ""] +, ["Udieresis", 220, "LATIN CAPITAL LETTER U WITH DIAERESIS", ""] +, ["Yacute", 221, "LATIN CAPITAL LETTER Y WITH ACUTE", ""] +, ["agrave", 224, "LATIN SMALL LETTER A WITH GRAVE", ""] +, ["aacute", 225, "LATIN SMALL LETTER A WITH ACUTE", ""] +, ["acircumflex", 226, "LATIN SMALL LETTER A WITH CIRCUMFLEX", ""] +, ["atilde", 227, "LATIN SMALL LETTER A WITH TILDE", ""] +, ["adieresis", 228, "LATIN SMALL LETTER A WITH DIAERESIS", ""] +, ["aring", 229, "LATIN SMALL LETTER A WITH RING ABOVE", ""] +, ["aringacute", 507, "LATIN SMALL LETTER A WITH RING ABOVE AND ACUTE", ""] +, ["ccedilla", 231, "LATIN SMALL LETTER C WITH CEDILLA", ""] +, ["egrave", 232, "LATIN SMALL LETTER E WITH GRAVE", ""] +, ["eacute", 233, "LATIN SMALL LETTER E WITH ACUTE", ""] +, ["ecircumflex", 234, "LATIN SMALL LETTER E WITH CIRCUMFLEX", ""] +, ["edieresis", 235, "LATIN SMALL LETTER E WITH DIAERESIS", ""] +, ["igrave", 236, "LATIN SMALL LETTER I WITH GRAVE", ""] +, ["iacute", 237, "LATIN SMALL LETTER I WITH ACUTE", ""] +, ["icircumflex", 238, "LATIN SMALL LETTER I WITH CIRCUMFLEX", ""] +, ["idieresis", 239, "LATIN SMALL LETTER I WITH DIAERESIS", ""] +, ["ntilde", 241, "LATIN SMALL LETTER N WITH TILDE", ""] +, ["ograve", 242, "LATIN SMALL LETTER O WITH GRAVE", ""] +, ["oacute", 243, "LATIN SMALL LETTER O WITH ACUTE", ""] +, ["ocircumflex", 244, "LATIN SMALL LETTER O WITH CIRCUMFLEX", ""] +, ["otilde", 245, "LATIN SMALL LETTER O WITH TILDE", ""] +, ["odieresis", 246, "LATIN SMALL LETTER O WITH DIAERESIS", ""] +, ["ugrave", 249, "LATIN SMALL LETTER U WITH GRAVE", ""] +, ["uacute", 250, "LATIN SMALL LETTER U WITH ACUTE", ""] +, ["ucircumflex", 251, "LATIN SMALL LETTER U WITH CIRCUMFLEX", ""] +, ["udieresis", 252, "LATIN SMALL LETTER U WITH DIAERESIS", ""] +, ["yacute", 253, "LATIN SMALL LETTER Y WITH ACUTE", ""] +, 
["ydieresis", 255, "LATIN SMALL LETTER Y WITH DIAERESIS", ""] +, ["Amacron", 256, "LATIN CAPITAL LETTER A WITH MACRON", ""] +, ["amacron", 257, "LATIN SMALL LETTER A WITH MACRON", ""] +, ["Abreve", 258, "LATIN CAPITAL LETTER A WITH BREVE", ""] +, ["abreve", 259, "LATIN SMALL LETTER A WITH BREVE", ""] +, ["Aogonek", 260, "LATIN CAPITAL LETTER A WITH OGONEK", ""] +, ["aogonek", 261, "LATIN SMALL LETTER A WITH OGONEK", ""] +, ["Cacute", 262, "LATIN CAPITAL LETTER C WITH ACUTE", ""] +, ["cacute", 263, "LATIN SMALL LETTER C WITH ACUTE", ""] +, ["Ccircumflex", 264, "LATIN CAPITAL LETTER C WITH CIRCUMFLEX", ""] +, ["ccircumflex", 265, "LATIN SMALL LETTER C WITH CIRCUMFLEX", ""] +, ["Ccaron", 268, "LATIN CAPITAL LETTER C WITH CARON", ""] +, ["ccaron", 269, "LATIN SMALL LETTER C WITH CARON", ""] +, ["Dcaron", 270, "LATIN CAPITAL LETTER D WITH CARON", ""] +, ["dcaron", 271, "LATIN SMALL LETTER D WITH CARON", ""] +, ["Emacron", 274, "LATIN CAPITAL LETTER E WITH MACRON", ""] +, ["emacron", 275, "LATIN SMALL LETTER E WITH MACRON", ""] +, ["Ebreve", 276, "LATIN CAPITAL LETTER E WITH BREVE", ""] +, ["ebreve", 277, "LATIN SMALL LETTER E WITH BREVE", ""] +, ["Edotaccent", 278, "LATIN CAPITAL LETTER E WITH DOT ABOVE", ""] +, ["edotaccent", 279, "LATIN SMALL LETTER E WITH DOT ABOVE", ""] +, ["Eogonek", 280, "LATIN CAPITAL LETTER E WITH OGONEK", ""] +, ["eogonek", 281, "LATIN SMALL LETTER E WITH OGONEK", ""] +, ["Ecaron", 282, "LATIN CAPITAL LETTER E WITH CARON", ""] +, ["ecaron", 283, "LATIN SMALL LETTER E WITH CARON", ""] +, ["Gcircumflex", 284, "LATIN CAPITAL LETTER G WITH CIRCUMFLEX", ""] +, ["gcircumflex", 285, "LATIN SMALL LETTER G WITH CIRCUMFLEX", ""] +, ["Gbreve", 286, "LATIN CAPITAL LETTER G WITH BREVE", ""] +, ["gbreve", 287, "LATIN SMALL LETTER G WITH BREVE", ""] +, ["Gcommaaccent", 290, "LATIN CAPITAL LETTER G WITH CEDILLA", ""] +, ["gcommaaccent", 291, "LATIN SMALL LETTER G WITH CEDILLA", ""] +, ["Hcircumflex", 292, "LATIN CAPITAL LETTER H WITH CIRCUMFLEX", ""] +, 
["hcircumflex", 293, "LATIN SMALL LETTER H WITH CIRCUMFLEX", ""] +, ["Itilde", 296, "LATIN CAPITAL LETTER I WITH TILDE", ""] +, ["itilde", 297, "LATIN SMALL LETTER I WITH TILDE", ""] +, ["Imacron", 298, "LATIN CAPITAL LETTER I WITH MACRON", ""] +, ["imacron", 299, "LATIN SMALL LETTER I WITH MACRON", ""] +, ["Ibreve", 300, "LATIN CAPITAL LETTER I WITH BREVE", ""] +, ["ibreve", 301, "LATIN SMALL LETTER I WITH BREVE", ""] +, ["Iogonek", 302, "LATIN CAPITAL LETTER I WITH OGONEK", ""] +, ["iogonek", 303, "LATIN SMALL LETTER I WITH OGONEK", ""] +, ["Idotaccent", 304, "LATIN CAPITAL LETTER I WITH DOT ABOVE", ""] +, ["IJ", 306, "LATIN CAPITAL LIGATURE IJ", ""] +, ["ij", 307, "LATIN SMALL LIGATURE IJ", ""] +, ["Jcircumflex", 308, "LATIN CAPITAL LETTER J WITH CIRCUMFLEX", ""] +, ["jcircumflex", 309, "LATIN SMALL LETTER J WITH CIRCUMFLEX", ""] +, ["Kcommaaccent", 310, "LATIN CAPITAL LETTER K WITH CEDILLA", ""] +, ["kcommaaccent", 311, "LATIN SMALL LETTER K WITH CEDILLA", ""] +, ["Lacute", 313, "LATIN CAPITAL LETTER L WITH ACUTE", ""] +, ["lacute", 314, "LATIN SMALL LETTER L WITH ACUTE", ""] +, ["Lcommaaccent", 315, "LATIN CAPITAL LETTER L WITH CEDILLA", ""] +, ["lcommaaccent", 316, "LATIN SMALL LETTER L WITH CEDILLA", ""] +, ["Lcaron", 317, "LATIN CAPITAL LETTER L WITH CARON", ""] +, ["lcaron", 318, "LATIN SMALL LETTER L WITH CARON", ""] +, ["Nacute", 323, "LATIN CAPITAL LETTER N WITH ACUTE", ""] +, ["nacute", 324, "LATIN SMALL LETTER N WITH ACUTE", ""] +, ["Ncommaaccent", 325, "LATIN CAPITAL LETTER N WITH CEDILLA", ""] +, ["ncommaaccent", 326, "LATIN SMALL LETTER N WITH CEDILLA", ""] +, ["Ncaron", 327, "LATIN CAPITAL LETTER N WITH CARON", ""] +, ["ncaron", 328, "LATIN SMALL LETTER N WITH CARON", ""] +, ["Omacron", 332, "LATIN CAPITAL LETTER O WITH MACRON", ""] +, ["omacron", 333, "LATIN SMALL LETTER O WITH MACRON", ""] +, ["Obreve", 334, "LATIN CAPITAL LETTER O WITH BREVE", ""] +, ["obreve", 335, "LATIN SMALL LETTER O WITH BREVE", ""] +, ["Ohungarumlaut", 336, "LATIN CAPITAL 
LETTER O WITH DOUBLE ACUTE", ""] +, ["ohungarumlaut", 337, "LATIN SMALL LETTER O WITH DOUBLE ACUTE", ""] +, ["Racute", 340, "LATIN CAPITAL LETTER R WITH ACUTE", ""] +, ["racute", 341, "LATIN SMALL LETTER R WITH ACUTE", ""] +, ["Rcommaaccent", 342, "LATIN CAPITAL LETTER R WITH CEDILLA", ""] +, ["rcommaaccent", 343, "LATIN SMALL LETTER R WITH CEDILLA", ""] +, ["Rcaron", 344, "LATIN CAPITAL LETTER R WITH CARON", ""] +, ["rcaron", 345, "LATIN SMALL LETTER R WITH CARON", ""] +, ["Sacute", 346, "LATIN CAPITAL LETTER S WITH ACUTE", ""] +, ["sacute", 347, "LATIN SMALL LETTER S WITH ACUTE", ""] +, ["Scircumflex", 348, "LATIN CAPITAL LETTER S WITH CIRCUMFLEX", ""] +, ["scircumflex", 349, "LATIN SMALL LETTER S WITH CIRCUMFLEX", ""] +, ["Scedilla", 350, "LATIN CAPITAL LETTER S WITH CEDILLA", ""] +, ["scedilla", 351, "LATIN SMALL LETTER S WITH CEDILLA", ""] +, ["Scaron", 352, "LATIN CAPITAL LETTER S WITH CARON", ""] +, ["scaron", 353, "LATIN SMALL LETTER S WITH CARON", ""] +, ["uni021A", 538, "LATIN CAPITAL LETTER T WITH COMMA BELOW", ""] +, ["uni021B", 539, "LATIN SMALL LETTER T WITH COMMA BELOW", ""] +, ["Tcaron", 356, "LATIN CAPITAL LETTER T WITH CARON", ""] +, ["tcaron", 357, "LATIN SMALL LETTER T WITH CARON", ""] +, ["Utilde", 360, "LATIN CAPITAL LETTER U WITH TILDE", ""] +, ["utilde", 361, "LATIN SMALL LETTER U WITH TILDE", ""] +, ["Umacron", 362, "LATIN CAPITAL LETTER U WITH MACRON", ""] +, ["umacron", 363, "LATIN SMALL LETTER U WITH MACRON", ""] +, ["Ubreve", 364, "LATIN CAPITAL LETTER U WITH BREVE", ""] +, ["ubreve", 365, "LATIN SMALL LETTER U WITH BREVE", ""] +, ["Uring", 366, "LATIN CAPITAL LETTER U WITH RING ABOVE", ""] +, ["uring", 367, "LATIN SMALL LETTER U WITH RING ABOVE", ""] +, ["Uhungarumlaut", 368, "LATIN CAPITAL LETTER U WITH DOUBLE ACUTE", ""] +, ["uhungarumlaut", 369, "LATIN SMALL LETTER U WITH DOUBLE ACUTE", ""] +, ["Uogonek", 370, "LATIN CAPITAL LETTER U WITH OGONEK", ""] +, ["uogonek", 371, "LATIN SMALL LETTER U WITH OGONEK", ""] +, ["Wcircumflex", 
372, "LATIN CAPITAL LETTER W WITH CIRCUMFLEX", ""] +, ["wcircumflex", 373, "LATIN SMALL LETTER W WITH CIRCUMFLEX", ""] +, ["Ycircumflex", 374, "LATIN CAPITAL LETTER Y WITH CIRCUMFLEX", ""] +, ["ycircumflex", 375, "LATIN SMALL LETTER Y WITH CIRCUMFLEX", ""] +, ["Ydieresis", 376, "LATIN CAPITAL LETTER Y WITH DIAERESIS", ""] +, ["Zacute", 377, "LATIN CAPITAL LETTER Z WITH ACUTE", ""] +, ["zacute", 378, "LATIN SMALL LETTER Z WITH ACUTE", ""] +, ["Zdotaccent", 379, "LATIN CAPITAL LETTER Z WITH DOT ABOVE", ""] +, ["zdotaccent", 380, "LATIN SMALL LETTER Z WITH DOT ABOVE", ""] +, ["Zcaron", 381, "LATIN CAPITAL LETTER Z WITH CARON", ""] +, ["zcaron", 382, "LATIN SMALL LETTER Z WITH CARON", ""] +, ["AEacute", 508, "LATIN CAPITAL LETTER AE WITH ACUTE", ""] +, ["aeacute", 509, "LATIN SMALL LETTER AE WITH ACUTE", ""] +, ["Alphatonos", 902, "GREEK CAPITAL LETTER ALPHA WITH TONOS", ""] +, ["Epsilontonos", 904, "GREEK CAPITAL LETTER EPSILON WITH TONOS", ""] +, ["Etatonos", 905, "GREEK CAPITAL LETTER ETA WITH TONOS", ""] +, ["Iotatonos", 906, "GREEK CAPITAL LETTER IOTA WITH TONOS", ""] +, ["Omicrontonos", 908, "GREEK CAPITAL LETTER OMICRON WITH TONOS", ""] +, ["Upsilontonos", 910, "GREEK CAPITAL LETTER UPSILON WITH TONOS", ""] +, ["Omegatonos", 911, "GREEK CAPITAL LETTER OMEGA WITH TONOS", ""] +, ["iotadieresistonos", 912, "GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS", ""] +, ["Alpha", 913, "GREEK CAPITAL LETTER ALPHA", ""] +, ["Beta", 914, "GREEK CAPITAL LETTER BETA", ""] +, ["Epsilon", 917, "GREEK CAPITAL LETTER EPSILON", ""] +, ["Zeta", 918, "GREEK CAPITAL LETTER ZETA", ""] +, ["Eta", 919, "GREEK CAPITAL LETTER ETA", ""] +, ["Iota", 921, "GREEK CAPITAL LETTER IOTA", ""] +, ["Kappa", 922, "GREEK CAPITAL LETTER KAPPA", ""] +, ["Mu", 924, "GREEK CAPITAL LETTER MU", ""] +, ["Nu", 925, "GREEK CAPITAL LETTER NU", ""] +, ["Omicron", 927, "GREEK CAPITAL LETTER OMICRON", ""] +, ["Rho", 929, "GREEK CAPITAL LETTER RHO", ""] +, ["Tau", 932, "GREEK CAPITAL LETTER TAU", ""] +, 
["Upsilon", 933, "GREEK CAPITAL LETTER UPSILON", ""] +, ["Chi", 935, "GREEK CAPITAL LETTER CHI", ""] +, ["Iotadieresis", 938, "GREEK CAPITAL LETTER IOTA WITH DIALYTIKA", ""] +, ["Upsilondieresis", 939, "GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA", ""] +, ["alphatonos", 940, "GREEK SMALL LETTER ALPHA WITH TONOS", ""] +, ["epsilontonos", 941, "GREEK SMALL LETTER EPSILON WITH TONOS", ""] +, ["etatonos", 942, "GREEK SMALL LETTER ETA WITH TONOS", ""] +, ["iotatonos", 943, "GREEK SMALL LETTER IOTA WITH TONOS", ""] +, ["upsilondieresistonos", 944, "GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS", ""] +, ["kappa", 954, "GREEK SMALL LETTER KAPPA", ""] +, ["omicron", 959, "GREEK SMALL LETTER OMICRON", ""] +, ["nu", 957, "GREEK SMALL LETTER NU", ""] +, ["iotadieresis", 970, "GREEK SMALL LETTER IOTA WITH DIALYTIKA", ""] +, ["upsilondieresis", 971, "GREEK SMALL LETTER UPSILON WITH DIALYTIKA", ""] +, ["omicrontonos", 972, "GREEK SMALL LETTER OMICRON WITH TONOS", ""] +, ["upsilontonos", 973, "GREEK SMALL LETTER UPSILON WITH TONOS", ""] +, ["omegatonos", 974, "GREEK SMALL LETTER OMEGA WITH TONOS", ""] +, ["Wgrave", 7808, "LATIN CAPITAL LETTER W WITH GRAVE", ""] +, ["wgrave", 7809, "LATIN SMALL LETTER W WITH GRAVE", ""] +, ["Wacute", 7810, "LATIN CAPITAL LETTER W WITH ACUTE", ""] +, ["wacute", 7811, "LATIN SMALL LETTER W WITH ACUTE", ""] +, ["Wdieresis", 7812, "LATIN CAPITAL LETTER W WITH DIAERESIS", ""] +, ["wdieresis", 7813, "LATIN SMALL LETTER W WITH DIAERESIS", ""] +, ["Ygrave", 7922, "LATIN CAPITAL LETTER Y WITH GRAVE", ""] +, ["ygrave", 7923, "LATIN SMALL LETTER Y WITH GRAVE", ""] +, ["minute", 8242, "PRIME", ""] +, ["second", 8243, "DOUBLE PRIME", ""] +, ["exclamdbl", 8252, "DOUBLE EXCLAMATION MARK", ""] +, ["uni0400", 1024, "CYRILLIC CAPITAL LETTER IE WITH GRAVE", ""] +, ["uni040D", 1037, "CYRILLIC CAPITAL LETTER I WITH GRAVE", ""] +, ["uni0450", 1104, "CYRILLIC SMALL LETTER IE WITH GRAVE", ""] +, ["uni045D", 1117, "CYRILLIC SMALL LETTER I WITH GRAVE", ""] +, 
["uni04CF", 1231, "CYRILLIC SMALL LETTER PALOCHKA", ""] +, ["uni04EC", 1260, "CYRILLIC CAPITAL LETTER E WITH DIAERESIS", ""] +, ["uni04ED", 1261, "CYRILLIC SMALL LETTER E WITH DIAERESIS", ""] +, ["uni0501", 1281, "CYRILLIC SMALL LETTER KOMI DE", ""] +, ["uni04F6", 1270, "CYRILLIC CAPITAL LETTER GHE WITH DESCENDER", ""] +, ["uni04F7", 1271, "CYRILLIC SMALL LETTER GHE WITH DESCENDER", ""] +, ["uni048C", 1164, "CYRILLIC CAPITAL LETTER SEMISOFT SIGN", ""] +, ["uni04FE", 1278, "CYRILLIC CAPITAL LETTER HA WITH STROKE", ""] +, ["uni04FF", 1279, "CYRILLIC SMALL LETTER HA WITH STROKE", ""] +, ["uni0511", 1297, "CYRILLIC SMALL LETTER REVERSED ZE", ""] +, ["franc", 8355, "FRENCH FRANC SIGN", ""] +, ["LJ", 455, "LATIN CAPITAL LETTER LJ", ""] +, ["Lj", 456, "LATIN CAPITAL LETTER L WITH SMALL LETTER J", ""] +, ["lj", 457, "LATIN SMALL LETTER LJ", ""] +, ["NJ", 458, "LATIN CAPITAL LETTER NJ", ""] +, ["Nj", 459, "LATIN CAPITAL LETTER N WITH SMALL LETTER J", ""] +, ["nj", 460, "LATIN SMALL LETTER NJ", ""] +, ["Acaron", 461, "LATIN CAPITAL LETTER A WITH CARON", ""] +, ["acaron", 462, "LATIN SMALL LETTER A WITH CARON", ""] +, ["Icaron", 463, "LATIN CAPITAL LETTER I WITH CARON", ""] +, ["icaron", 464, "LATIN SMALL LETTER I WITH CARON", ""] +, ["Ocaron", 465, "LATIN CAPITAL LETTER O WITH CARON", ""] +, ["ocaron", 466, "LATIN SMALL LETTER O WITH CARON", ""] +, ["Ucaron", 467, "LATIN CAPITAL LETTER U WITH CARON", ""] +, ["ucaron", 468, "LATIN SMALL LETTER U WITH CARON", ""] +, ["AEmacron", 482, "LATIN CAPITAL LETTER AE WITH MACRON", ""] +, ["aemacron", 483, "LATIN SMALL LETTER AE WITH MACRON", ""] +, ["Gcaron", 486, "LATIN CAPITAL LETTER G WITH CARON", ""] +, ["gcaron", 487, "LATIN SMALL LETTER G WITH CARON", ""] +, ["Kcaron", 488, "LATIN CAPITAL LETTER K WITH CARON", ""] +, ["kcaron", 489, "LATIN SMALL LETTER K WITH CARON", ""] +, ["Oogonek", 490, "LATIN CAPITAL LETTER O WITH OGONEK", ""] +, ["oogonek", 491, "LATIN SMALL LETTER O WITH OGONEK", ""] +, ["Gacute", 500, "LATIN CAPITAL 
LETTER G WITH ACUTE", ""] +, ["gacute", 501, "LATIN SMALL LETTER G WITH ACUTE", ""] +, ["uni01F8", 504, "LATIN CAPITAL LETTER N WITH GRAVE", ""] +, ["uni01F9", 505, "LATIN SMALL LETTER N WITH GRAVE", ""] +, ["uni021E", 542, "LATIN CAPITAL LETTER H WITH CARON", ""] +, ["uni021F", 543, "LATIN SMALL LETTER H WITH CARON", ""] +, ["uni0226", 550, "LATIN CAPITAL LETTER A WITH DOT ABOVE", ""] +, ["uni0227", 551, "LATIN SMALL LETTER A WITH DOT ABOVE", ""] +, ["uni0228", 552, "LATIN CAPITAL LETTER E WITH CEDILLA", ""] +, ["uni0229", 553, "LATIN SMALL LETTER E WITH CEDILLA", ""] +, ["uni022A", 554, "LATIN CAPITAL LETTER O WITH DIAERESIS AND MACRON", ""] +, ["uni022B", 555, "LATIN SMALL LETTER O WITH DIAERESIS AND MACRON", ""] +, ["uni022C", 556, "LATIN CAPITAL LETTER O WITH TILDE AND MACRON", ""] +, ["uni022D", 557, "LATIN SMALL LETTER O WITH TILDE AND MACRON", ""] +, ["uni022E", 558, "LATIN CAPITAL LETTER O WITH DOT ABOVE", ""] +, ["uni022F", 559, "LATIN SMALL LETTER O WITH DOT ABOVE", ""] +, ["uni0230", 560, "LATIN CAPITAL LETTER O WITH DOT ABOVE AND MACRON", ""] +, ["uni0231", 561, "LATIN SMALL LETTER O WITH DOT ABOVE AND MACRON", ""] +, ["uni0232", 562, "LATIN CAPITAL LETTER Y WITH MACRON", ""] +, ["uni0233", 563, "LATIN SMALL LETTER Y WITH MACRON", ""] +, ["uni0299", 665, "LATIN LETTER SMALL CAPITAL B", ""] +, ["uni029C", 668, "LATIN LETTER SMALL CAPITAL H", ""] +, ["uni037F", 895, "GREEK CAPITAL LETTER YOT", ""] +, ["uni03F4", 1012, "GREEK CAPITAL THETA SYMBOL", ""] +, ["uni03F7", 1015, "GREEK CAPITAL LETTER SHO", ""] +, ["uni03F8", 1016, "GREEK SMALL LETTER SHO", ""] +, ["uni03F9", 1017, "GREEK CAPITAL LUNATE SIGMA SYMBOL", ""] +, ["uni03FA", 1018, "GREEK CAPITAL LETTER SAN", ""] +, ["uni051A", 1306, "CYRILLIC CAPITAL LETTER QA", ""] +, ["uni051B", 1307, "CYRILLIC SMALL LETTER QA", ""] +, ["uni051C", 1308, "CYRILLIC CAPITAL LETTER WE", ""] +, ["uni051D", 1309, "CYRILLIC SMALL LETTER WE", ""] +, ["uni1F00", 7936, "GREEK SMALL LETTER ALPHA WITH PSILI", ""] +, 
["uni1F01", 7937, "GREEK SMALL LETTER ALPHA WITH DASIA", ""] +, ["uni1F02", 7938, "GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA", ""] +, ["uni1F03", 7939, "GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA", ""] +, ["uni1F04", 7940, "GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA", ""] +, ["uni1F05", 7941, "GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA", ""] +, ["uni1F06", 7942, "GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI", ""] +, ["uni1F07", 7943, "GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI", ""] +, ["uni1F08", 7944, "GREEK CAPITAL LETTER ALPHA WITH PSILI", ""] +, ["uni1F09", 7945, "GREEK CAPITAL LETTER ALPHA WITH DASIA", ""] +, ["uni1F0A", 7946, "GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA", ""] +, ["uni1F0B", 7947, "GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA", ""] +, ["uni1F0C", 7948, "GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA", ""] +, ["uni1F0D", 7949, "GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA", ""] +, ["uni1F0E", 7950, "GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI", ""] +, ["uni1F0F", 7951, "GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI", ""] +, ["uni1F10", 7952, "GREEK SMALL LETTER EPSILON WITH PSILI", ""] +, ["uni1F11", 7953, "GREEK SMALL LETTER EPSILON WITH DASIA", ""] +, ["uni1F12", 7954, "GREEK SMALL LETTER EPSILON WITH PSILI AND VARIA", ""] +, ["uni1F13", 7955, "GREEK SMALL LETTER EPSILON WITH DASIA AND VARIA", ""] +, ["uni1F14", 7956, "GREEK SMALL LETTER EPSILON WITH PSILI AND OXIA", ""] +, ["uni1F15", 7957, "GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA", ""] +, ["uni1F18", 7960, "GREEK CAPITAL LETTER EPSILON WITH PSILI", ""] +, ["uni1F19", 7961, "GREEK CAPITAL LETTER EPSILON WITH DASIA", ""] +, ["uni1F1A", 7962, "GREEK CAPITAL LETTER EPSILON WITH PSILI AND VARIA", ""] +, ["uni1F1B", 7963, "GREEK CAPITAL LETTER EPSILON WITH DASIA AND VARIA", ""] +, ["uni1F1C", 7964, "GREEK CAPITAL LETTER EPSILON WITH PSILI AND OXIA", ""] +, ["uni1F1D", 7965, "GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA", ""] +, 
["uni1F20", 7968, "GREEK SMALL LETTER ETA WITH PSILI", ""] +, ["uni1F21", 7969, "GREEK SMALL LETTER ETA WITH DASIA", ""] +, ["uni1F22", 7970, "GREEK SMALL LETTER ETA WITH PSILI AND VARIA", ""] +, ["uni1F23", 7971, "GREEK SMALL LETTER ETA WITH DASIA AND VARIA", ""] +, ["uni1F24", 7972, "GREEK SMALL LETTER ETA WITH PSILI AND OXIA", ""] +, ["uni1F25", 7973, "GREEK SMALL LETTER ETA WITH DASIA AND OXIA", ""] +, ["uni1F26", 7974, "GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI", ""] +, ["uni1F27", 7975, "GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI", ""] +, ["uni1F28", 7976, "GREEK CAPITAL LETTER ETA WITH PSILI", ""] +, ["uni1F29", 7977, "GREEK CAPITAL LETTER ETA WITH DASIA", ""] +, ["uni1F2A", 7978, "GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA", ""] +, ["uni1F2B", 7979, "GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA", ""] +, ["uni1F2C", 7980, "GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA", ""] +, ["uni1F2D", 7981, "GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA", ""] +, ["uni1F2E", 7982, "GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI", ""] +, ["uni1F2F", 7983, "GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI", ""] +, ["uni1F30", 7984, "GREEK SMALL LETTER IOTA WITH PSILI", ""] +, ["uni1F31", 7985, "GREEK SMALL LETTER IOTA WITH DASIA", ""] +, ["uni1F32", 7986, "GREEK SMALL LETTER IOTA WITH PSILI AND VARIA", ""] +, ["uni1F33", 7987, "GREEK SMALL LETTER IOTA WITH DASIA AND VARIA", ""] +, ["uni1F34", 7988, "GREEK SMALL LETTER IOTA WITH PSILI AND OXIA", ""] +, ["uni1F35", 7989, "GREEK SMALL LETTER IOTA WITH DASIA AND OXIA", ""] +, ["uni1F36", 7990, "GREEK SMALL LETTER IOTA WITH PSILI AND PERISPOMENI", ""] +, ["uni1F37", 7991, "GREEK SMALL LETTER IOTA WITH DASIA AND PERISPOMENI", ""] +, ["uni1F38", 7992, "GREEK CAPITAL LETTER IOTA WITH PSILI", ""] +, ["uni1F39", 7993, "GREEK CAPITAL LETTER IOTA WITH DASIA", ""] +, ["uni1F3A", 7994, "GREEK CAPITAL LETTER IOTA WITH PSILI AND VARIA", ""] +, ["uni1F3B", 7995, "GREEK CAPITAL LETTER IOTA WITH DASIA AND VARIA", ""] 
+, ["uni1F3C", 7996, "GREEK CAPITAL LETTER IOTA WITH PSILI AND OXIA", ""] +, ["uni1F3D", 7997, "GREEK CAPITAL LETTER IOTA WITH DASIA AND OXIA", ""] +, ["uni1F3E", 7998, "GREEK CAPITAL LETTER IOTA WITH PSILI AND PERISPOMENI", ""] +, ["uni1F3F", 7999, "GREEK CAPITAL LETTER IOTA WITH DASIA AND PERISPOMENI", ""] +, ["uni1F40", 8000, "GREEK SMALL LETTER OMICRON WITH PSILI", ""] +, ["uni1F41", 8001, "GREEK SMALL LETTER OMICRON WITH DASIA", ""] +, ["uni1F42", 8002, "GREEK SMALL LETTER OMICRON WITH PSILI AND VARIA", ""] +, ["uni1F43", 8003, "GREEK SMALL LETTER OMICRON WITH DASIA AND VARIA", ""] +, ["uni1F44", 8004, "GREEK SMALL LETTER OMICRON WITH PSILI AND OXIA", ""] +, ["uni1F45", 8005, "GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA", ""] +, ["uni1F48", 8008, "GREEK CAPITAL LETTER OMICRON WITH PSILI", ""] +, ["uni1F49", 8009, "GREEK CAPITAL LETTER OMICRON WITH DASIA", ""] +, ["uni1F4A", 8010, "GREEK CAPITAL LETTER OMICRON WITH PSILI AND VARIA", ""] +, ["uni1F4B", 8011, "GREEK CAPITAL LETTER OMICRON WITH DASIA AND VARIA", ""] +, ["uni1F4C", 8012, "GREEK CAPITAL LETTER OMICRON WITH PSILI AND OXIA", ""] +, ["uni1F4D", 8013, "GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA", ""] +, ["uni1F50", 8016, "GREEK SMALL LETTER UPSILON WITH PSILI", ""] +, ["uni1F51", 8017, "GREEK SMALL LETTER UPSILON WITH DASIA", ""] +, ["uni1F52", 8018, "GREEK SMALL LETTER UPSILON WITH PSILI AND VARIA", ""] +, ["uni1F53", 8019, "GREEK SMALL LETTER UPSILON WITH DASIA AND VARIA", ""] +, ["uni1F54", 8020, "GREEK SMALL LETTER UPSILON WITH PSILI AND OXIA", ""] +, ["uni1F55", 8021, "GREEK SMALL LETTER UPSILON WITH DASIA AND OXIA", ""] +, ["uni1F56", 8022, "GREEK SMALL LETTER UPSILON WITH PSILI AND PERISPOMENI", ""] +, ["uni1F57", 8023, "GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI", ""] +, ["uni1F59", 8025, "GREEK CAPITAL LETTER UPSILON WITH DASIA", ""] +, ["uni1F5B", 8027, "GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA", ""] +, ["uni1F5D", 8029, "GREEK CAPITAL LETTER UPSILON WITH DASIA AND 
OXIA", ""] +, ["uni1F5F", 8031, "GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI", ""] +, ["uni1F60", 8032, "GREEK SMALL LETTER OMEGA WITH PSILI", ""] +, ["uni1F61", 8033, "GREEK SMALL LETTER OMEGA WITH DASIA", ""] +, ["uni1F62", 8034, "GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA", ""] +, ["uni1F63", 8035, "GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA", ""] +, ["uni1F64", 8036, "GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA", ""] +, ["uni1F65", 8037, "GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA", ""] +, ["uni1F66", 8038, "GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI", ""] +, ["uni1F67", 8039, "GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI", ""] +, ["uni1F68", 8040, "GREEK CAPITAL LETTER OMEGA WITH PSILI", ""] +, ["uni1F69", 8041, "GREEK CAPITAL LETTER OMEGA WITH DASIA", ""] +, ["uni1F6A", 8042, "GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA", ""] +, ["uni1F6B", 8043, "GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA", ""] +, ["uni1F6C", 8044, "GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA", ""] +, ["uni1F6D", 8045, "GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA", ""] +, ["uni1F6E", 8046, "GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI", ""] +, ["uni1F6F", 8047, "GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI", ""] +, ["uni1F70", 8048, "GREEK SMALL LETTER ALPHA WITH VARIA", ""] +, ["uni1F71", 8049, "GREEK SMALL LETTER ALPHA WITH OXIA", ""] +, ["uni1F72", 8050, "GREEK SMALL LETTER EPSILON WITH VARIA", ""] +, ["uni1F73", 8051, "GREEK SMALL LETTER EPSILON WITH OXIA", ""] +, ["uni1F74", 8052, "GREEK SMALL LETTER ETA WITH VARIA", ""] +, ["uni1F75", 8053, "GREEK SMALL LETTER ETA WITH OXIA", ""] +, ["uni1F76", 8054, "GREEK SMALL LETTER IOTA WITH VARIA", ""] +, ["uni1F77", 8055, "GREEK SMALL LETTER IOTA WITH OXIA", ""] +, ["uni1F78", 8056, "GREEK SMALL LETTER OMICRON WITH VARIA", ""] +, ["uni1F79", 8057, "GREEK SMALL LETTER OMICRON WITH OXIA", ""] +, ["uni1F7A", 8058, "GREEK SMALL LETTER UPSILON WITH VARIA", ""] +, ["uni1F7B", 8059, 
"GREEK SMALL LETTER UPSILON WITH OXIA", ""] +, ["uni1F7C", 8060, "GREEK SMALL LETTER OMEGA WITH VARIA", ""] +, ["uni1F7D", 8061, "GREEK SMALL LETTER OMEGA WITH OXIA", ""] +, ["uni1F80", 8064, "GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI", ""] +, ["uni1F81", 8065, "GREEK SMALL LETTER ALPHA WITH DASIA AND YPOGEGRAMMENI", ""] +, ["uni1F82", 8066, "GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA AND YPOGEGRAMMENI", ""] +, ["uni1F83", 8067, "GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA AND YPOGEGRAMMENI", ""] +, ["uni1F84", 8068, "GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA AND YPOGEGRAMMENI", ""] +, ["uni1F85", 8069, "GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND YPOGEGRAMMENI", ""] +, ["uni1F86", 8070, "GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI", ""] +, ["uni1F87", 8071, "GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI", ""] +, ["uni1F88", 8072, "GREEK CAPITAL LETTER ALPHA WITH PSILI AND PROSGEGRAMMENI", ""] +, ["uni1F89", 8073, "GREEK CAPITAL LETTER ALPHA WITH DASIA AND PROSGEGRAMMENI", ""] +, ["uni1F8A", 8074, "GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA AND PROSGEGRAMMENI", ""] +, ["uni1F8B", 8075, "GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA AND PROSGEGRAMMENI", ""] +, ["uni1F8C", 8076, "GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA AND PROSGEGRAMMENI", ""] +, ["uni1F8D", 8077, "GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA AND PROSGEGRAMMENI", ""] +, ["uni1F8E", 8078, "GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI", ""] +, ["uni1F8F", 8079, "GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI", ""] +, ["uni1F90", 8080, "GREEK SMALL LETTER ETA WITH PSILI AND YPOGEGRAMMENI", ""] +, ["uni1F91", 8081, "GREEK SMALL LETTER ETA WITH DASIA AND YPOGEGRAMMENI", ""] +, ["uni1F92", 8082, "GREEK SMALL LETTER ETA WITH PSILI AND VARIA AND YPOGEGRAMMENI", ""] +, ["uni1F93", 8083, "GREEK SMALL LETTER ETA WITH DASIA AND VARIA AND YPOGEGRAMMENI", ""] +, 
["uni1F94", 8084, "GREEK SMALL LETTER ETA WITH PSILI AND OXIA AND YPOGEGRAMMENI", ""] +, ["uni1F95", 8085, "GREEK SMALL LETTER ETA WITH DASIA AND OXIA AND YPOGEGRAMMENI", ""] +, ["uni1F96", 8086, "GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI", ""] +, ["uni1F97", 8087, "GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI", ""] +, ["uni1F98", 8088, "GREEK CAPITAL LETTER ETA WITH PSILI AND PROSGEGRAMMENI", ""] +, ["uni1F99", 8089, "GREEK CAPITAL LETTER ETA WITH DASIA AND PROSGEGRAMMENI", ""] +, ["uni1F9A", 8090, "GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA AND PROSGEGRAMMENI", ""] +, ["uni1F9B", 8091, "GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA AND PROSGEGRAMMENI", ""] +, ["uni1F9C", 8092, "GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA AND PROSGEGRAMMENI", ""] +, ["uni1F9D", 8093, "GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA AND PROSGEGRAMMENI", ""] +, ["uni1F9E", 8094, "GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI", ""] +, ["uni1F9F", 8095, "GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI", ""] +, ["uni1FA0", 8096, "GREEK SMALL LETTER OMEGA WITH PSILI AND YPOGEGRAMMENI", ""] +, ["uni1FA1", 8097, "GREEK SMALL LETTER OMEGA WITH DASIA AND YPOGEGRAMMENI", ""] +, ["uni1FA2", 8098, "GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA AND YPOGEGRAMMENI", ""] +, ["uni1FA3", 8099, "GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA AND YPOGEGRAMMENI", ""] +, ["uni1FA4", 8100, "GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA AND YPOGEGRAMMENI", ""] +, ["uni1FA5", 8101, "GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND YPOGEGRAMMENI", ""] +, ["uni1FA6", 8102, "GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI", ""] +, ["uni1FA7", 8103, "GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI", ""] +, ["uni1FA8", 8104, "GREEK CAPITAL LETTER OMEGA WITH PSILI AND PROSGEGRAMMENI", ""] +, ["uni1FA9", 8105, "GREEK CAPITAL LETTER OMEGA WITH DASIA AND PROSGEGRAMMENI", ""] +, 
["uni1FAA", 8106, "GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA AND PROSGEGRAMMENI", ""] +, ["uni1FAB", 8107, "GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA AND PROSGEGRAMMENI", ""] +, ["uni1FAC", 8108, "GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA AND PROSGEGRAMMENI", ""] +, ["uni1FAD", 8109, "GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA AND PROSGEGRAMMENI", ""] +, ["uni1FAE", 8110, "GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI", ""] +, ["uni1FAF", 8111, "GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI", ""] +, ["uni1FB0", 8112, "GREEK SMALL LETTER ALPHA WITH VRACHY", ""] +, ["uni1FB1", 8113, "GREEK SMALL LETTER ALPHA WITH MACRON", ""] +, ["uni1FB2", 8114, "GREEK SMALL LETTER ALPHA WITH VARIA AND YPOGEGRAMMENI", ""] +, ["uni1FB3", 8115, "GREEK SMALL LETTER ALPHA WITH YPOGEGRAMMENI", ""] +, ["uni1FB4", 8116, "GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI", ""] +, ["uni1FB6", 8118, "GREEK SMALL LETTER ALPHA WITH PERISPOMENI", ""] +, ["uni1FB7", 8119, "GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND YPOGEGRAMMENI", ""] +, ["uni1FB8", 8120, "GREEK CAPITAL LETTER ALPHA WITH VRACHY", ""] +, ["uni1FB9", 8121, "GREEK CAPITAL LETTER ALPHA WITH MACRON", ""] +, ["uni1FBA", 8122, "GREEK CAPITAL LETTER ALPHA WITH VARIA", ""] +, ["uni1FBB", 8123, "GREEK CAPITAL LETTER ALPHA WITH OXIA", ""] +, ["uni1FBC", 8124, "GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI", ""] +, ["uni1FC2", 8130, "GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI", ""] +, ["uni1FC3", 8131, "GREEK SMALL LETTER ETA WITH YPOGEGRAMMENI", ""] +, ["uni1FC4", 8132, "GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI", ""] +, ["uni1FC6", 8134, "GREEK SMALL LETTER ETA WITH PERISPOMENI", ""] +, ["uni1FC7", 8135, "GREEK SMALL LETTER ETA WITH PERISPOMENI AND YPOGEGRAMMENI", ""] +, ["uni1FC8", 8136, "GREEK CAPITAL LETTER EPSILON WITH VARIA", ""] +, ["uni1FC9", 8137, "GREEK CAPITAL LETTER EPSILON WITH OXIA", ""] +, ["uni1FCA", 8138, "GREEK CAPITAL 
LETTER ETA WITH VARIA", ""] +, ["uni1FCB", 8139, "GREEK CAPITAL LETTER ETA WITH OXIA", ""] +, ["uni1FCC", 8140, "GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI", ""] +, ["uni1FD0", 8144, "GREEK SMALL LETTER IOTA WITH VRACHY", ""] +, ["uni1FD1", 8145, "GREEK SMALL LETTER IOTA WITH MACRON", ""] +, ["uni1FD2", 8146, "GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA", ""] +, ["uni1FD3", 8147, "GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA", ""] +, ["uni1FD6", 8150, "GREEK SMALL LETTER IOTA WITH PERISPOMENI", ""] +, ["uni1FD7", 8151, "GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI", ""] +, ["uni1FD8", 8152, "GREEK CAPITAL LETTER IOTA WITH VRACHY", ""] +, ["uni1FD9", 8153, "GREEK CAPITAL LETTER IOTA WITH MACRON", ""] +, ["uni1FDA", 8154, "GREEK CAPITAL LETTER IOTA WITH VARIA", ""] +, ["uni1FDB", 8155, "GREEK CAPITAL LETTER IOTA WITH OXIA", ""] +, ["uni1FE0", 8160, "GREEK SMALL LETTER UPSILON WITH VRACHY", ""] +, ["uni1FE1", 8161, "GREEK SMALL LETTER UPSILON WITH MACRON", ""] +, ["uni1FE2", 8162, "GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA", ""] +, ["uni1FE3", 8163, "GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA", ""] +, ["uni1FE4", 8164, "GREEK SMALL LETTER RHO WITH PSILI", ""] +, ["uni1FE5", 8165, "GREEK SMALL LETTER RHO WITH DASIA", ""] +, ["uni1FE6", 8166, "GREEK SMALL LETTER UPSILON WITH PERISPOMENI", ""] +, ["uni1FE7", 8167, "GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND PERISPOMENI", ""] +, ["uni1FE8", 8168, "GREEK CAPITAL LETTER UPSILON WITH VRACHY", ""] +, ["uni1FE9", 8169, "GREEK CAPITAL LETTER UPSILON WITH MACRON", ""] +, ["uni1FEA", 8170, "GREEK CAPITAL LETTER UPSILON WITH VARIA", ""] +, ["uni1FEB", 8171, "GREEK CAPITAL LETTER UPSILON WITH OXIA", ""] +, ["uni1FEC", 8172, "GREEK CAPITAL LETTER RHO WITH DASIA", ""] +, ["uni1FF2", 8178, "GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI", ""] +, ["uni1FF3", 8179, "GREEK SMALL LETTER OMEGA WITH YPOGEGRAMMENI", ""] +, ["uni1FF4", 8180, "GREEK SMALL LETTER OMEGA WITH OXIA AND 
YPOGEGRAMMENI", ""] +, ["uni1FF6", 8182, "GREEK SMALL LETTER OMEGA WITH PERISPOMENI", ""] +, ["uni1FF7", 8183, "GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND YPOGEGRAMMENI", ""] +, ["uni1FF8", 8184, "GREEK CAPITAL LETTER OMICRON WITH VARIA", ""] +, ["uni1FF9", 8185, "GREEK CAPITAL LETTER OMICRON WITH OXIA", ""] +, ["uni1FFA", 8186, "GREEK CAPITAL LETTER OMEGA WITH VARIA", ""] +, ["uni1FFB", 8187, "GREEK CAPITAL LETTER OMEGA WITH OXIA", ""] +, ["uni1FFC", 8188, "GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI", ""] +, ["uni202F", 8239, "NARROW NO-BREAK SPACE", ""] +, ["uni2047", 8263, "DOUBLE QUESTION MARK", ""] +, ["uni2048", 8264, "QUESTION EXCLAMATION MARK", ""] +, ["uni2049", 8265, "EXCLAMATION QUESTION MARK", ""] +, ["uni205F", 8287, "MEDIUM MATHEMATICAL SPACE", ""] +, ["uniA78F", 42895, "LATIN LETTER SINOLOGICAL DOT", ""] +, ["softhyphen", 173, "SOFT HYPHEN", ""] +, ["Cdotaccent", 266, "LATIN CAPITAL LETTER C WITH DOT ABOVE", ""] +, ["cdotaccent", 267, "LATIN SMALL LETTER C WITH DOT ABOVE", ""] +, ["Gdotaccent", 288, "LATIN CAPITAL LETTER G WITH DOT ABOVE", ""] +, ["gdotaccent", 289, "LATIN SMALL LETTER G WITH DOT ABOVE", ""] +, ["Scommaaccent", 536, "LATIN CAPITAL LETTER S WITH COMMA BELOW", ""] +, ["scommaaccent", 537, "LATIN SMALL LETTER S WITH COMMA BELOW", ""] +, ["Tcommaaccent", 354, "LATIN CAPITAL LETTER T WITH CEDILLA", ""] +, ["tcommaaccent", 355, "LATIN SMALL LETTER T WITH CEDILLA", ""] +, ["mugreek", 956, "GREEK SMALL LETTER MU", ""] +, ["afii10023", 1025, "CYRILLIC CAPITAL LETTER IO", ""] +, ["afii10052", 1027, "CYRILLIC CAPITAL LETTER GJE", ""] +, ["afii10054", 1029, "CYRILLIC CAPITAL LETTER DZE", ""] +, ["afii10055", 1030, "CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I", ""] +, ["afii10056", 1031, "CYRILLIC CAPITAL LETTER YI", ""] +, ["afii10057", 1032, "CYRILLIC CAPITAL LETTER JE", ""] +, ["afii10061", 1036, "CYRILLIC CAPITAL LETTER KJE", ""] +, ["afii10062", 1038, "CYRILLIC CAPITAL LETTER SHORT U", ""] +, ["afii10017", 1040, "CYRILLIC CAPITAL 
LETTER A", ""] +, ["afii10019", 1042, "CYRILLIC CAPITAL LETTER VE", ""] +, ["afii10020", 1043, "CYRILLIC CAPITAL LETTER GHE", ""] +, ["afii10022", 1045, "CYRILLIC CAPITAL LETTER IE", ""] +, ["afii10027", 1049, "CYRILLIC CAPITAL LETTER SHORT I", ""] +, ["afii10030", 1052, "CYRILLIC CAPITAL LETTER EM", ""] +, ["afii10031", 1053, "CYRILLIC CAPITAL LETTER EN", ""] +, ["afii10032", 1054, "CYRILLIC CAPITAL LETTER O", ""] +, ["afii10033", 1055, "CYRILLIC CAPITAL LETTER PE", ""] +, ["afii10034", 1056, "CYRILLIC CAPITAL LETTER ER", ""] +, ["afii10035", 1057, "CYRILLIC CAPITAL LETTER ES", ""] +, ["afii10036", 1058, "CYRILLIC CAPITAL LETTER TE", ""] +, ["afii10039", 1061, "CYRILLIC CAPITAL LETTER HA", ""] +, ["afii10065", 1072, "CYRILLIC SMALL LETTER A", ""] +, ["iecyrillic", 1077, "CYRILLIC SMALL LETTER IE", ""] +, ["iishortcyrillic", 1081, "CYRILLIC SMALL LETTER SHORT I", ""] +, ["ocyrillic", 1086, "CYRILLIC SMALL LETTER O", ""] +, ["ercyrillic", 1088, "CYRILLIC SMALL LETTER ER", ""] +, ["escyrillic", 1089, "CYRILLIC SMALL LETTER ES", ""] +, ["ucyrillic", 1091, "CYRILLIC SMALL LETTER U", ""] +, ["khacyrillic", 1093, "CYRILLIC SMALL LETTER HA", ""] +, ["iocyrillic", 1105, "CYRILLIC SMALL LETTER IO", ""] +, ["gjecyrillic", 1107, "CYRILLIC SMALL LETTER GJE", ""] +, ["dzecyrillic", 1109, "CYRILLIC SMALL LETTER DZE", ""] +, ["icyrillic", 1110, "CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I", ""] +, ["yicyrillic", 1111, "CYRILLIC SMALL LETTER YI", ""] +, ["jecyrillic", 1112, "CYRILLIC SMALL LETTER JE", ""] +, ["kjecyrillic", 1116, "CYRILLIC SMALL LETTER KJE", ""] +, ["ushortcyrillic", 1118, "CYRILLIC SMALL LETTER SHORT U", ""] +, ["jcaron", 496, "LATIN SMALL LETTER J WITH CARON", ""] +, ["apostrophemod", 700, "MODIFIER LETTER APOSTROPHE", ""] +, ["Macute", 7742, "LATIN CAPITAL LETTER M WITH ACUTE", ""] +, ["macute", 7743, "LATIN SMALL LETTER M WITH ACUTE", ""] +, ["Aringbelow", 7680, "LATIN CAPITAL LETTER A WITH RING BELOW", ""] +, ["aringbelow", 7681, "LATIN SMALL LETTER A WITH 
RING BELOW", ""] +, ["Psicyrillic", 1136, "CYRILLIC CAPITAL LETTER PSI", ""] +, ["psicyrillic", 1137, "CYRILLIC SMALL LETTER PSI", ""] +, ["Izhitsadblgravecyrillic", 1142, "CYRILLIC CAPITAL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT", ""] +, ["izhitsadblgravecyrillic", 1143, "CYRILLIC SMALL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT", ""] +, ["ukcyrillic", 1145, "CYRILLIC SMALL LETTER UK", ""] +, ["Ukcyrillic", 1144, "CYRILLIC CAPITAL LETTER UK", ""] +, ["Zedescendercyrillic", 1176, "CYRILLIC CAPITAL LETTER ZE WITH DESCENDER", ""] +, ["zedescendercyrillic", 1177, "CYRILLIC SMALL LETTER ZE WITH DESCENDER", ""] +, ["Esdescendercyrillic", 1194, "CYRILLIC CAPITAL LETTER ES WITH DESCENDER", ""] +, ["esdescendercyrillic", 1195, "CYRILLIC SMALL LETTER ES WITH DESCENDER", ""] +, ["Ustraightcyrillic", 1198, "CYRILLIC CAPITAL LETTER STRAIGHT U", ""] +, ["ustraightcyrillic", 1199, "CYRILLIC SMALL LETTER STRAIGHT U", ""] +, ["palochkacyrillic", 1216, "CYRILLIC LETTER PALOCHKA", ""] +, ["Zhebrevecyrillic", 1217, "CYRILLIC CAPITAL LETTER ZHE WITH BREVE", ""] +, ["zhebrevecyrillic", 1218, "CYRILLIC SMALL LETTER ZHE WITH BREVE", ""] +, ["Abrevecyrillic", 1232, "CYRILLIC CAPITAL LETTER A WITH BREVE", ""] +, ["abrevecyrillic", 1233, "CYRILLIC SMALL LETTER A WITH BREVE", ""] +, ["Adieresiscyrillic", 1234, "CYRILLIC CAPITAL LETTER A WITH DIAERESIS", ""] +, ["adieresiscyrillic", 1235, "CYRILLIC SMALL LETTER A WITH DIAERESIS", ""] +, ["Aiecyrillic", 1236, "CYRILLIC CAPITAL LIGATURE A IE", ""] +, ["aiecyrillic", 1237, "CYRILLIC SMALL LIGATURE A IE", ""] +, ["Iebrevecyrillic", 1238, "CYRILLIC CAPITAL LETTER IE WITH BREVE", ""] +, ["iebrevecyrillic", 1239, "CYRILLIC SMALL LETTER IE WITH BREVE", ""] +, ["Schwadieresiscyrillic", 1242, "CYRILLIC CAPITAL LETTER SCHWA WITH DIAERESIS", ""] +, ["schwacyrillic", 1241, "CYRILLIC SMALL LETTER SCHWA", ""] +, ["schwadieresiscyrillic", 1243, "CYRILLIC SMALL LETTER SCHWA WITH DIAERESIS", ""] +, ["Zhedieresiscyrillic", 1244, "CYRILLIC CAPITAL LETTER ZHE WITH 
DIAERESIS", ""] +, ["zhedieresiscyrillic", 1245, "CYRILLIC SMALL LETTER ZHE WITH DIAERESIS", ""] +, ["Zedieresiscyrillic", 1246, "CYRILLIC CAPITAL LETTER ZE WITH DIAERESIS", ""] +, ["zedieresiscyrillic", 1247, "CYRILLIC SMALL LETTER ZE WITH DIAERESIS", ""] +, ["Imacroncyrillic", 1250, "CYRILLIC CAPITAL LETTER I WITH MACRON", ""] +, ["imacroncyrillic", 1251, "CYRILLIC SMALL LETTER I WITH MACRON", ""] +, ["Idieresiscyrillic", 1252, "CYRILLIC CAPITAL LETTER I WITH DIAERESIS", ""] +, ["idieresiscyrillic", 1253, "CYRILLIC SMALL LETTER I WITH DIAERESIS", ""] +, ["Odieresiscyrillic", 1254, "CYRILLIC CAPITAL LETTER O WITH DIAERESIS", ""] +, ["odieresiscyrillic", 1255, "CYRILLIC SMALL LETTER O WITH DIAERESIS", ""] +, ["Obarredcyrillic", 1256, "CYRILLIC CAPITAL LETTER BARRED O", ""] +, ["obarredcyrillic", 1257, "CYRILLIC SMALL LETTER BARRED O", ""] +, ["Obarreddieresiscyrillic", 1258, "CYRILLIC CAPITAL LETTER BARRED O WITH DIAERESIS", ""] +, ["obarreddieresiscyrillic", 1259, "CYRILLIC SMALL LETTER BARRED O WITH DIAERESIS", ""] +, ["Umacroncyrillic", 1262, "CYRILLIC CAPITAL LETTER U WITH MACRON", ""] +, ["umacroncyrillic", 1263, "CYRILLIC SMALL LETTER U WITH MACRON", ""] +, ["Udieresiscyrillic", 1264, "CYRILLIC CAPITAL LETTER U WITH DIAERESIS", ""] +, ["udieresiscyrillic", 1265, "CYRILLIC SMALL LETTER U WITH DIAERESIS", ""] +, ["Uhungarumlautcyrillic", 1266, "CYRILLIC CAPITAL LETTER U WITH DOUBLE ACUTE", ""] +, ["uhungarumlautcyrillic", 1267, "CYRILLIC SMALL LETTER U WITH DOUBLE ACUTE", ""] +, ["Chedieresiscyrillic", 1268, "CYRILLIC CAPITAL LETTER CHE WITH DIAERESIS", ""] +, ["chedieresiscyrillic", 1269, "CYRILLIC SMALL LETTER CHE WITH DIAERESIS", ""] +, ["Yerudieresiscyrillic", 1272, "CYRILLIC CAPITAL LETTER YERU WITH DIAERESIS", ""] +, ["yerudieresiscyrillic", 1273, "CYRILLIC SMALL LETTER YERU WITH DIAERESIS", ""] +, ["Adotbelow", 7840, "LATIN CAPITAL LETTER A WITH DOT BELOW", ""] +, ["adotbelow", 7841, "LATIN SMALL LETTER A WITH DOT BELOW", ""] +, ["Ahookabove", 7842, 
"LATIN CAPITAL LETTER A WITH HOOK ABOVE", ""] +, ["ahookabove", 7843, "LATIN SMALL LETTER A WITH HOOK ABOVE", ""] +, ["Acircumflexacute", 7844, "LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND ACUTE", ""] +, ["acircumflexacute", 7845, "LATIN SMALL LETTER A WITH CIRCUMFLEX AND ACUTE", ""] +, ["Acircumflexgrave", 7846, "LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND GRAVE", ""] +, ["acircumflexgrave", 7847, "LATIN SMALL LETTER A WITH CIRCUMFLEX AND GRAVE", ""] +, ["Acircumflexhookabove", 7848, "LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE", ""] +, ["acircumflexhookabove", 7849, "LATIN SMALL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE", ""] +, ["Acircumflextilde", 7850, "LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND TILDE", ""] +, ["acircumflextilde", 7851, "LATIN SMALL LETTER A WITH CIRCUMFLEX AND TILDE", ""] +, ["Acircumflexdotbelow", 7852, "LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND DOT BELOW", ""] +, ["acircumflexdotbelow", 7853, "LATIN SMALL LETTER A WITH CIRCUMFLEX AND DOT BELOW", ""] +, ["Abreveacute", 7854, "LATIN CAPITAL LETTER A WITH BREVE AND ACUTE", ""] +, ["abreveacute", 7855, "LATIN SMALL LETTER A WITH BREVE AND ACUTE", ""] +, ["Abrevegrave", 7856, "LATIN CAPITAL LETTER A WITH BREVE AND GRAVE", ""] +, ["abrevegrave", 7857, "LATIN SMALL LETTER A WITH BREVE AND GRAVE", ""] +, ["Abrevehookabove", 7858, "LATIN CAPITAL LETTER A WITH BREVE AND HOOK ABOVE", ""] +, ["abrevehookabove", 7859, "LATIN SMALL LETTER A WITH BREVE AND HOOK ABOVE", ""] +, ["Abrevetilde", 7860, "LATIN CAPITAL LETTER A WITH BREVE AND TILDE", ""] +, ["abrevetilde", 7861, "LATIN SMALL LETTER A WITH BREVE AND TILDE", ""] +, ["Abrevedotbelow", 7862, "LATIN CAPITAL LETTER A WITH BREVE AND DOT BELOW", ""] +, ["abrevedotbelow", 7863, "LATIN SMALL LETTER A WITH BREVE AND DOT BELOW", ""] +, ["Edotbelow", 7864, "LATIN CAPITAL LETTER E WITH DOT BELOW", ""] +, ["edotbelow", 7865, "LATIN SMALL LETTER E WITH DOT BELOW", ""] +, ["Ehookabove", 7866, "LATIN CAPITAL LETTER E WITH HOOK ABOVE", ""] +, ["ehookabove", 
7867, "LATIN SMALL LETTER E WITH HOOK ABOVE", ""] +, ["Etilde", 7868, "LATIN CAPITAL LETTER E WITH TILDE", ""] +, ["etilde", 7869, "LATIN SMALL LETTER E WITH TILDE", ""] +, ["Ecircumflexacute", 7870, "LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND ACUTE", ""] +, ["ecircumflexacute", 7871, "LATIN SMALL LETTER E WITH CIRCUMFLEX AND ACUTE", ""] +, ["Ecircumflexgrave", 7872, "LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND GRAVE", ""] +, ["ecircumflexgrave", 7873, "LATIN SMALL LETTER E WITH CIRCUMFLEX AND GRAVE", ""] +, ["Ecircumflexhookabove", 7874, "LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE", ""] +, ["ecircumflexhookabove", 7875, "LATIN SMALL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE", ""] +, ["Ecircumflextilde", 7876, "LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND TILDE", ""] +, ["ecircumflextilde", 7877, "LATIN SMALL LETTER E WITH CIRCUMFLEX AND TILDE", ""] +, ["Ecircumflexdotbelow", 7878, "LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND DOT BELOW", ""] +, ["ecircumflexdotbelow", 7879, "LATIN SMALL LETTER E WITH CIRCUMFLEX AND DOT BELOW", ""] +, ["Ihookabove", 7880, "LATIN CAPITAL LETTER I WITH HOOK ABOVE", ""] +, ["ihookabove", 7881, "LATIN SMALL LETTER I WITH HOOK ABOVE", ""] +, ["Idotbelow", 7882, "LATIN CAPITAL LETTER I WITH DOT BELOW", ""] +, ["idotbelow", 7883, "LATIN SMALL LETTER I WITH DOT BELOW", ""] +, ["Odotbelow", 7884, "LATIN CAPITAL LETTER O WITH DOT BELOW", ""] +, ["odotbelow", 7885, "LATIN SMALL LETTER O WITH DOT BELOW", ""] +, ["Ohookabove", 7886, "LATIN CAPITAL LETTER O WITH HOOK ABOVE", ""] +, ["ohookabove", 7887, "LATIN SMALL LETTER O WITH HOOK ABOVE", ""] +, ["Ocircumflexacute", 7888, "LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND ACUTE", ""] +, ["ocircumflexacute", 7889, "LATIN SMALL LETTER O WITH CIRCUMFLEX AND ACUTE", ""] +, ["Ocircumflexgrave", 7890, "LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND GRAVE", ""] +, ["ocircumflexgrave", 7891, "LATIN SMALL LETTER O WITH CIRCUMFLEX AND GRAVE", ""] +, ["Ocircumflexhookabove", 7892, "LATIN CAPITAL LETTER O WITH 
CIRCUMFLEX AND HOOK ABOVE", ""] +, ["ocircumflexhookabove", 7893, "LATIN SMALL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE", ""] +, ["Ocircumflextilde", 7894, "LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND TILDE", ""] +, ["ocircumflextilde", 7895, "LATIN SMALL LETTER O WITH CIRCUMFLEX AND TILDE", ""] +, ["Ocircumflexdotbelow", 7896, "LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND DOT BELOW", ""] +, ["ocircumflexdotbelow", 7897, "LATIN SMALL LETTER O WITH CIRCUMFLEX AND DOT BELOW", ""] +, ["Ohornacute", 7898, "LATIN CAPITAL LETTER O WITH HORN AND ACUTE", ""] +, ["ohornacute", 7899, "LATIN SMALL LETTER O WITH HORN AND ACUTE", ""] +, ["Ohorngrave", 7900, "LATIN CAPITAL LETTER O WITH HORN AND GRAVE", ""] +, ["ohorngrave", 7901, "LATIN SMALL LETTER O WITH HORN AND GRAVE", ""] +, ["Ohornhookabove", 7902, "LATIN CAPITAL LETTER O WITH HORN AND HOOK ABOVE", ""] +, ["ohornhookabove", 7903, "LATIN SMALL LETTER O WITH HORN AND HOOK ABOVE", ""] +, ["Ohorntilde", 7904, "LATIN CAPITAL LETTER O WITH HORN AND TILDE", ""] +, ["ohorntilde", 7905, "LATIN SMALL LETTER O WITH HORN AND TILDE", ""] +, ["Ohorndotbelow", 7906, "LATIN CAPITAL LETTER O WITH HORN AND DOT BELOW", ""] +, ["ohorndotbelow", 7907, "LATIN SMALL LETTER O WITH HORN AND DOT BELOW", ""] +, ["Udotbelow", 7908, "LATIN CAPITAL LETTER U WITH DOT BELOW", ""] +, ["udotbelow", 7909, "LATIN SMALL LETTER U WITH DOT BELOW", ""] +, ["Uhookabove", 7910, "LATIN CAPITAL LETTER U WITH HOOK ABOVE", ""] +, ["uhookabove", 7911, "LATIN SMALL LETTER U WITH HOOK ABOVE", ""] +, ["Uhornacute", 7912, "LATIN CAPITAL LETTER U WITH HORN AND ACUTE", ""] +, ["uhornacute", 7913, "LATIN SMALL LETTER U WITH HORN AND ACUTE", ""] +, ["Uhorngrave", 7914, "LATIN CAPITAL LETTER U WITH HORN AND GRAVE", ""] +, ["uhorngrave", 7915, "LATIN SMALL LETTER U WITH HORN AND GRAVE", ""] +, ["Uhornhookabove", 7916, "LATIN CAPITAL LETTER U WITH HORN AND HOOK ABOVE", ""] +, ["uhornhookabove", 7917, "LATIN SMALL LETTER U WITH HORN AND HOOK ABOVE", ""] +, ["Uhorntilde", 7918, "LATIN 
CAPITAL LETTER U WITH HORN AND TILDE", ""] +, ["uhorntilde", 7919, "LATIN SMALL LETTER U WITH HORN AND TILDE", ""] +, ["Uhorndotbelow", 7920, "LATIN CAPITAL LETTER U WITH HORN AND DOT BELOW", ""] +, ["uhorndotbelow", 7921, "LATIN SMALL LETTER U WITH HORN AND DOT BELOW", ""] +, ["Ydotbelow", 7924, "LATIN CAPITAL LETTER Y WITH DOT BELOW", ""] +, ["ydotbelow", 7925, "LATIN SMALL LETTER Y WITH DOT BELOW", ""] +, ["Yhookabove", 7926, "LATIN CAPITAL LETTER Y WITH HOOK ABOVE", ""] +, ["yhookabove", 7927, "LATIN SMALL LETTER Y WITH HOOK ABOVE", ""] +, ["Ytilde", 7928, "LATIN CAPITAL LETTER Y WITH TILDE", ""] +, ["ytilde", 7929, "LATIN SMALL LETTER Y WITH TILDE", ""] +, ["dong", 8363, "DONG SIGN", ""] +, ["Tedescendercyrillic", 1196, "CYRILLIC CAPITAL LETTER TE WITH DESCENDER", ""] +, ["tedescendercyrillic", 1197, "CYRILLIC SMALL LETTER TE WITH DESCENDER", ""] +, ["Chekhakassiancyrillic", 1227, "CYRILLIC CAPITAL LETTER KHAKASSIAN CHE", ""] +, ["chekhakassiancyrillic", 1228, "CYRILLIC SMALL LETTER KHAKASSIAN CHE", ""] +, ["Chedescenderabkhasiancyrillic", 1214, "CYRILLIC CAPITAL LETTER ABKHASIAN CHE WITH DESCENDER", ""] +, ["chedescenderabkhasiancyrillic", 1215, "CYRILLIC SMALL LETTER ABKHASIAN CHE WITH DESCENDER", ""] +, ["shhacyrillic", 1211, "CYRILLIC SMALL LETTER SHHA", ""] +, ["afii10146", 1122, "CYRILLIC CAPITAL LETTER YAT", ""] +, ["Ghestrokecyrillic", 1170, "CYRILLIC CAPITAL LETTER GHE WITH STROKE", ""] +, ["ghestrokecyrillic", 1171, "CYRILLIC SMALL LETTER GHE WITH STROKE", ""] +, ["Ustraightstrokecyrillic", 1200, "CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE", ""] +, ["ustraightstrokecyrillic", 1201, "CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE", ""] +, ["horizontalbar", 8213, "HORIZONTAL BAR", ""] +, ["ypogegrammeni", 890, "GREEK YPOGEGRAMMENI", ""] +, ["nlegrightlong", 414, "LATIN SMALL LETTER N WITH LONG RIGHT LEG", ""] +, ["Udieresismacron", 469, "LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON", ""] +, ["udieresismacron", 470, "LATIN SMALL LETTER U WITH 
DIAERESIS AND MACRON", ""] +, ["Udieresisacute", 471, "LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE", ""] +, ["udieresisacute", 472, "LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE", ""] +, ["Udieresiscaron", 473, "LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON", ""] +, ["udieresiscaron", 474, "LATIN SMALL LETTER U WITH DIAERESIS AND CARON", ""] +, ["Udieresisgrave", 475, "LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE", ""] +, ["udieresisgrave", 476, "LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE", ""] +, ["Adieresismacron", 478, "LATIN CAPITAL LETTER A WITH DIAERESIS AND MACRON", ""] +, ["adieresismacron", 479, "LATIN SMALL LETTER A WITH DIAERESIS AND MACRON", ""] +, ["Adotmacron", 480, "LATIN CAPITAL LETTER A WITH DOT ABOVE AND MACRON", ""] +, ["adotmacron", 481, "LATIN SMALL LETTER A WITH DOT ABOVE AND MACRON", ""] +, ["Oogonekmacron", 492, "LATIN CAPITAL LETTER O WITH OGONEK AND MACRON", ""] +, ["oogonekmacron", 493, "LATIN SMALL LETTER O WITH OGONEK AND MACRON", ""] +, ["DZ", 497, "LATIN CAPITAL LETTER DZ", ""] +, ["Dz", 498, "LATIN CAPITAL LETTER D WITH SMALL LETTER Z", ""] +, ["Adblgrave", 512, "LATIN CAPITAL LETTER A WITH DOUBLE GRAVE", ""] +, ["adblgrave", 513, "LATIN SMALL LETTER A WITH DOUBLE GRAVE", ""] +, ["Ainvertedbreve", 514, "LATIN CAPITAL LETTER A WITH INVERTED BREVE", ""] +, ["ainvertedbreve", 515, "LATIN SMALL LETTER A WITH INVERTED BREVE", ""] +, ["Edblgrave", 516, "LATIN CAPITAL LETTER E WITH DOUBLE GRAVE", ""] +, ["edblgrave", 517, "LATIN SMALL LETTER E WITH DOUBLE GRAVE", ""] +, ["Einvertedbreve", 518, "LATIN CAPITAL LETTER E WITH INVERTED BREVE", ""] +, ["einvertedbreve", 519, "LATIN SMALL LETTER E WITH INVERTED BREVE", ""] +, ["Idblgrave", 520, "LATIN CAPITAL LETTER I WITH DOUBLE GRAVE", ""] +, ["idblgrave", 521, "LATIN SMALL LETTER I WITH DOUBLE GRAVE", ""] +, ["Iinvertedbreve", 522, "LATIN CAPITAL LETTER I WITH INVERTED BREVE", ""] +, ["iinvertedbreve", 523, "LATIN SMALL LETTER I WITH INVERTED BREVE", ""] +, ["Odblgrave", 524, 
"LATIN CAPITAL LETTER O WITH DOUBLE GRAVE", ""] +, ["odblgrave", 525, "LATIN SMALL LETTER O WITH DOUBLE GRAVE", ""] +, ["Oinvertedbreve", 526, "LATIN CAPITAL LETTER O WITH INVERTED BREVE", ""] +, ["oinvertedbreve", 527, "LATIN SMALL LETTER O WITH INVERTED BREVE", ""] +, ["Rdblgrave", 528, "LATIN CAPITAL LETTER R WITH DOUBLE GRAVE", ""] +, ["rdblgrave", 529, "LATIN SMALL LETTER R WITH DOUBLE GRAVE", ""] +, ["Rinvertedbreve", 530, "LATIN CAPITAL LETTER R WITH INVERTED BREVE", ""] +, ["rinvertedbreve", 531, "LATIN SMALL LETTER R WITH INVERTED BREVE", ""] +, ["Udblgrave", 532, "LATIN CAPITAL LETTER U WITH DOUBLE GRAVE", ""] +, ["udblgrave", 533, "LATIN SMALL LETTER U WITH DOUBLE GRAVE", ""] +, ["Uinvertedbreve", 534, "LATIN CAPITAL LETTER U WITH INVERTED BREVE", ""] +, ["uinvertedbreve", 535, "LATIN SMALL LETTER U WITH INVERTED BREVE", ""] +, ["Upsilonacutehooksymbolgreek", 979, "GREEK UPSILON WITH ACUTE AND HOOK SYMBOL", ""] +, ["Upsilondieresishooksymbolgreek", 980, "GREEK UPSILON WITH DIAERESIS AND HOOK SYMBOL", ""] +, ["sigmalunatesymbolgreek", 1010, "GREEK LUNATE SIGMA SYMBOL", ""] +, ["yotgreek", 1011, "GREEK LETTER YOT", ""] +, ["Bdotaccent", 7682, "LATIN CAPITAL LETTER B WITH DOT ABOVE", ""] +, ["bdotaccent", 7683, "LATIN SMALL LETTER B WITH DOT ABOVE", ""] +, ["Bdotbelow", 7684, "LATIN CAPITAL LETTER B WITH DOT BELOW", ""] +, ["bdotbelow", 7685, "LATIN SMALL LETTER B WITH DOT BELOW", ""] +, ["Blinebelow", 7686, "LATIN CAPITAL LETTER B WITH LINE BELOW", ""] +, ["blinebelow", 7687, "LATIN SMALL LETTER B WITH LINE BELOW", ""] +, ["Ccedillaacute", 7688, "LATIN CAPITAL LETTER C WITH CEDILLA AND ACUTE", ""] +, ["ccedillaacute", 7689, "LATIN SMALL LETTER C WITH CEDILLA AND ACUTE", ""] +, ["Ddotaccent", 7690, "LATIN CAPITAL LETTER D WITH DOT ABOVE", ""] +, ["ddotaccent", 7691, "LATIN SMALL LETTER D WITH DOT ABOVE", ""] +, ["Ddotbelow", 7692, "LATIN CAPITAL LETTER D WITH DOT BELOW", ""] +, ["ddotbelow", 7693, "LATIN SMALL LETTER D WITH DOT BELOW", ""] +, ["Dlinebelow", 
7694, "LATIN CAPITAL LETTER D WITH LINE BELOW", ""] +, ["dlinebelow", 7695, "LATIN SMALL LETTER D WITH LINE BELOW", ""] +, ["Dcedilla", 7696, "LATIN CAPITAL LETTER D WITH CEDILLA", ""] +, ["dcedilla", 7697, "LATIN SMALL LETTER D WITH CEDILLA", ""] +, ["Dcircumflexbelow", 7698, "LATIN CAPITAL LETTER D WITH CIRCUMFLEX BELOW", ""] +, ["dcircumflexbelow", 7699, "LATIN SMALL LETTER D WITH CIRCUMFLEX BELOW", ""] +, ["Emacrongrave", 7700, "LATIN CAPITAL LETTER E WITH MACRON AND GRAVE", ""] +, ["emacrongrave", 7701, "LATIN SMALL LETTER E WITH MACRON AND GRAVE", ""] +, ["Emacronacute", 7702, "LATIN CAPITAL LETTER E WITH MACRON AND ACUTE", ""] +, ["emacronacute", 7703, "LATIN SMALL LETTER E WITH MACRON AND ACUTE", ""] +, ["Ecircumflexbelow", 7704, "LATIN CAPITAL LETTER E WITH CIRCUMFLEX BELOW", ""] +, ["ecircumflexbelow", 7705, "LATIN SMALL LETTER E WITH CIRCUMFLEX BELOW", ""] +, ["Etildebelow", 7706, "LATIN CAPITAL LETTER E WITH TILDE BELOW", ""] +, ["etildebelow", 7707, "LATIN SMALL LETTER E WITH TILDE BELOW", ""] +, ["Ecedillabreve", 7708, "LATIN CAPITAL LETTER E WITH CEDILLA AND BREVE", ""] +, ["ecedillabreve", 7709, "LATIN SMALL LETTER E WITH CEDILLA AND BREVE", ""] +, ["Fdotaccent", 7710, "LATIN CAPITAL LETTER F WITH DOT ABOVE", ""] +, ["fdotaccent", 7711, "LATIN SMALL LETTER F WITH DOT ABOVE", ""] +, ["Gmacron", 7712, "LATIN CAPITAL LETTER G WITH MACRON", ""] +, ["gmacron", 7713, "LATIN SMALL LETTER G WITH MACRON", ""] +, ["Hdotaccent", 7714, "LATIN CAPITAL LETTER H WITH DOT ABOVE", ""] +, ["hdotaccent", 7715, "LATIN SMALL LETTER H WITH DOT ABOVE", ""] +, ["Hdotbelow", 7716, "LATIN CAPITAL LETTER H WITH DOT BELOW", ""] +, ["hdotbelow", 7717, "LATIN SMALL LETTER H WITH DOT BELOW", ""] +, ["Hdieresis", 7718, "LATIN CAPITAL LETTER H WITH DIAERESIS", ""] +, ["hdieresis", 7719, "LATIN SMALL LETTER H WITH DIAERESIS", ""] +, ["Hbrevebelow", 7722, "LATIN CAPITAL LETTER H WITH BREVE BELOW", ""] +, ["hbrevebelow", 7723, "LATIN SMALL LETTER H WITH BREVE BELOW", ""] +, ["Kacute", 
7728, "LATIN CAPITAL LETTER K WITH ACUTE", ""] +, ["kacute", 7729, "LATIN SMALL LETTER K WITH ACUTE", ""] +, ["Kdotbelow", 7730, "LATIN CAPITAL LETTER K WITH DOT BELOW", ""] +, ["kdotbelow", 7731, "LATIN SMALL LETTER K WITH DOT BELOW", ""] +, ["Klinebelow", 7732, "LATIN CAPITAL LETTER K WITH LINE BELOW", ""] +, ["klinebelow", 7733, "LATIN SMALL LETTER K WITH LINE BELOW", ""] +, ["Ldotbelow", 7734, "LATIN CAPITAL LETTER L WITH DOT BELOW", ""] +, ["ldotbelow", 7735, "LATIN SMALL LETTER L WITH DOT BELOW", ""] +, ["Ldotbelowmacron", 7736, "LATIN CAPITAL LETTER L WITH DOT BELOW AND MACRON", ""] +, ["ldotbelowmacron", 7737, "LATIN SMALL LETTER L WITH DOT BELOW AND MACRON", ""] +, ["Llinebelow", 7738, "LATIN CAPITAL LETTER L WITH LINE BELOW", ""] +, ["llinebelow", 7739, "LATIN SMALL LETTER L WITH LINE BELOW", ""] +, ["Lcircumflexbelow", 7740, "LATIN CAPITAL LETTER L WITH CIRCUMFLEX BELOW", ""] +, ["lcircumflexbelow", 7741, "LATIN SMALL LETTER L WITH CIRCUMFLEX BELOW", ""] +, ["Mdotaccent", 7744, "LATIN CAPITAL LETTER M WITH DOT ABOVE", ""] +, ["mdotaccent", 7745, "LATIN SMALL LETTER M WITH DOT ABOVE", ""] +, ["Mdotbelow", 7746, "LATIN CAPITAL LETTER M WITH DOT BELOW", ""] +, ["mdotbelow", 7747, "LATIN SMALL LETTER M WITH DOT BELOW", ""] +, ["Ndotaccent", 7748, "LATIN CAPITAL LETTER N WITH DOT ABOVE", ""] +, ["ndotaccent", 7749, "LATIN SMALL LETTER N WITH DOT ABOVE", ""] +, ["Ndotbelow", 7750, "LATIN CAPITAL LETTER N WITH DOT BELOW", ""] +, ["ndotbelow", 7751, "LATIN SMALL LETTER N WITH DOT BELOW", ""] +, ["Nlinebelow", 7752, "LATIN CAPITAL LETTER N WITH LINE BELOW", ""] +, ["nlinebelow", 7753, "LATIN SMALL LETTER N WITH LINE BELOW", ""] +, ["Ncircumflexbelow", 7754, "LATIN CAPITAL LETTER N WITH CIRCUMFLEX BELOW", ""] +, ["ncircumflexbelow", 7755, "LATIN SMALL LETTER N WITH CIRCUMFLEX BELOW", ""] +, ["Otildeacute", 7756, "LATIN CAPITAL LETTER O WITH TILDE AND ACUTE", ""] +, ["otildeacute", 7757, "LATIN SMALL LETTER O WITH TILDE AND ACUTE", ""] +, ["Otildedieresis", 7758, 
"LATIN CAPITAL LETTER O WITH TILDE AND DIAERESIS", ""] +, ["otildedieresis", 7759, "LATIN SMALL LETTER O WITH TILDE AND DIAERESIS", ""] +, ["Omacrongrave", 7760, "LATIN CAPITAL LETTER O WITH MACRON AND GRAVE", ""] +, ["omacrongrave", 7761, "LATIN SMALL LETTER O WITH MACRON AND GRAVE", ""] +, ["Omacronacute", 7762, "LATIN CAPITAL LETTER O WITH MACRON AND ACUTE", ""] +, ["omacronacute", 7763, "LATIN SMALL LETTER O WITH MACRON AND ACUTE", ""] +, ["Pacute", 7764, "LATIN CAPITAL LETTER P WITH ACUTE", ""] +, ["pacute", 7765, "LATIN SMALL LETTER P WITH ACUTE", ""] +, ["Pdotaccent", 7766, "LATIN CAPITAL LETTER P WITH DOT ABOVE", ""] +, ["pdotaccent", 7767, "LATIN SMALL LETTER P WITH DOT ABOVE", ""] +, ["Rdotaccent", 7768, "LATIN CAPITAL LETTER R WITH DOT ABOVE", ""] +, ["rdotaccent", 7769, "LATIN SMALL LETTER R WITH DOT ABOVE", ""] +, ["Rdotbelow", 7770, "LATIN CAPITAL LETTER R WITH DOT BELOW", ""] +, ["rdotbelow", 7771, "LATIN SMALL LETTER R WITH DOT BELOW", ""] +, ["Rdotbelowmacron", 7772, "LATIN CAPITAL LETTER R WITH DOT BELOW AND MACRON", ""] +, ["rdotbelowmacron", 7773, "LATIN SMALL LETTER R WITH DOT BELOW AND MACRON", ""] +, ["Rlinebelow", 7774, "LATIN CAPITAL LETTER R WITH LINE BELOW", ""] +, ["rlinebelow", 7775, "LATIN SMALL LETTER R WITH LINE BELOW", ""] +, ["Sdotaccent", 7776, "LATIN CAPITAL LETTER S WITH DOT ABOVE", ""] +, ["sdotaccent", 7777, "LATIN SMALL LETTER S WITH DOT ABOVE", ""] +, ["Sdotbelow", 7778, "LATIN CAPITAL LETTER S WITH DOT BELOW", ""] +, ["sdotbelow", 7779, "LATIN SMALL LETTER S WITH DOT BELOW", ""] +, ["Sacutedotaccent", 7780, "LATIN CAPITAL LETTER S WITH ACUTE AND DOT ABOVE", ""] +, ["sacutedotaccent", 7781, "LATIN SMALL LETTER S WITH ACUTE AND DOT ABOVE", ""] +, ["Scarondotaccent", 7782, "LATIN CAPITAL LETTER S WITH CARON AND DOT ABOVE", ""] +, ["scarondotaccent", 7783, "LATIN SMALL LETTER S WITH CARON AND DOT ABOVE", ""] +, ["Sdotbelowdotaccent", 7784, "LATIN CAPITAL LETTER S WITH DOT BELOW AND DOT ABOVE", ""] +, ["sdotbelowdotaccent", 
7785, "LATIN SMALL LETTER S WITH DOT BELOW AND DOT ABOVE", ""] +, ["Tdotaccent", 7786, "LATIN CAPITAL LETTER T WITH DOT ABOVE", ""] +, ["tdotaccent", 7787, "LATIN SMALL LETTER T WITH DOT ABOVE", ""] +, ["Tdotbelow", 7788, "LATIN CAPITAL LETTER T WITH DOT BELOW", ""] +, ["tdotbelow", 7789, "LATIN SMALL LETTER T WITH DOT BELOW", ""] +, ["Tlinebelow", 7790, "LATIN CAPITAL LETTER T WITH LINE BELOW", ""] +, ["tlinebelow", 7791, "LATIN SMALL LETTER T WITH LINE BELOW", ""] +, ["Tcircumflexbelow", 7792, "LATIN CAPITAL LETTER T WITH CIRCUMFLEX BELOW", ""] +, ["tcircumflexbelow", 7793, "LATIN SMALL LETTER T WITH CIRCUMFLEX BELOW", ""] +, ["Udieresisbelow", 7794, "LATIN CAPITAL LETTER U WITH DIAERESIS BELOW", ""] +, ["udieresisbelow", 7795, "LATIN SMALL LETTER U WITH DIAERESIS BELOW", ""] +, ["Utildebelow", 7796, "LATIN CAPITAL LETTER U WITH TILDE BELOW", ""] +, ["utildebelow", 7797, "LATIN SMALL LETTER U WITH TILDE BELOW", ""] +, ["Ucircumflexbelow", 7798, "LATIN CAPITAL LETTER U WITH CIRCUMFLEX BELOW", ""] +, ["ucircumflexbelow", 7799, "LATIN SMALL LETTER U WITH CIRCUMFLEX BELOW", ""] +, ["Utildeacute", 7800, "LATIN CAPITAL LETTER U WITH TILDE AND ACUTE", ""] +, ["utildeacute", 7801, "LATIN SMALL LETTER U WITH TILDE AND ACUTE", ""] +, ["Umacrondieresis", 7802, "LATIN CAPITAL LETTER U WITH MACRON AND DIAERESIS", ""] +, ["umacrondieresis", 7803, "LATIN SMALL LETTER U WITH MACRON AND DIAERESIS", ""] +, ["Vtilde", 7804, "LATIN CAPITAL LETTER V WITH TILDE", ""] +, ["vtilde", 7805, "LATIN SMALL LETTER V WITH TILDE", ""] +, ["Vdotbelow", 7806, "LATIN CAPITAL LETTER V WITH DOT BELOW", ""] +, ["vdotbelow", 7807, "LATIN SMALL LETTER V WITH DOT BELOW", ""] +, ["Wdotaccent", 7814, "LATIN CAPITAL LETTER W WITH DOT ABOVE", ""] +, ["wdotaccent", 7815, "LATIN SMALL LETTER W WITH DOT ABOVE", ""] +, ["Wdotbelow", 7816, "LATIN CAPITAL LETTER W WITH DOT BELOW", ""] +, ["wdotbelow", 7817, "LATIN SMALL LETTER W WITH DOT BELOW", ""] +, ["Xdotaccent", 7818, "LATIN CAPITAL LETTER X WITH DOT ABOVE", 
""] +, ["xdotaccent", 7819, "LATIN SMALL LETTER X WITH DOT ABOVE", ""] +, ["Xdieresis", 7820, "LATIN CAPITAL LETTER X WITH DIAERESIS", ""] +, ["xdieresis", 7821, "LATIN SMALL LETTER X WITH DIAERESIS", ""] +, ["Ydotaccent", 7822, "LATIN CAPITAL LETTER Y WITH DOT ABOVE", ""] +, ["ydotaccent", 7823, "LATIN SMALL LETTER Y WITH DOT ABOVE", ""] +, ["Zcircumflex", 7824, "LATIN CAPITAL LETTER Z WITH CIRCUMFLEX", ""] +, ["zcircumflex", 7825, "LATIN SMALL LETTER Z WITH CIRCUMFLEX", ""] +, ["Zdotbelow", 7826, "LATIN CAPITAL LETTER Z WITH DOT BELOW", ""] +, ["zdotbelow", 7827, "LATIN SMALL LETTER Z WITH DOT BELOW", ""] +, ["Zlinebelow", 7828, "LATIN CAPITAL LETTER Z WITH LINE BELOW", ""] +, ["zlinebelow", 7829, "LATIN SMALL LETTER Z WITH LINE BELOW", ""] +, ["hlinebelow", 7830, "LATIN SMALL LETTER H WITH LINE BELOW", ""] +, ["tdieresis", 7831, "LATIN SMALL LETTER T WITH DIAERESIS", ""] +, ["wring", 7832, "LATIN SMALL LETTER W WITH RING ABOVE", ""] +, ["yring", 7833, "LATIN SMALL LETTER Y WITH RING ABOVE", ""] +, ["arighthalfring", 7834, "LATIN SMALL LETTER A WITH RIGHT HALF RING", ""] +, ["placeofinterestsign.specz.medium", null, null, "#dddddd"] +]} diff --git a/docs/lab/index.html b/docs/lab/index.html new file mode 100644 index 000000000..9de7b8a81 --- /dev/null +++ b/docs/lab/index.html @@ -0,0 +1,1635 @@ + + + + + + + 🔠 + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + Rectangle + Rectangle + Rectangle + Rectangle + + + + Rectangle + Rectangle + Rectangle + Rectangle + + +
+ + + + + + + +
+ +
Åj
+ + + + + diff --git a/docs/lab/words-google-10000-english-usa-no-swears.json b/docs/lab/words-google-10000-english-usa-no-swears.json new file mode 100644 index 000000000..461d06ba7 --- /dev/null +++ b/docs/lab/words-google-10000-english-usa-no-swears.json @@ -0,0 +1 @@ +["the", "of", "and", "to", "a", "in", "for", "is", "on", "that", "by", "this", "with", "i", "you", "it", "not", "or", "be", "are", "from", "at", "as", "your", "all", "have", "new", "more", "an", "was", "we", "will", "home", "can", "us", "about", "if", "page", "my", "has", "search", "free", "but", "our", "one", "other", "do", "no", "information", "time", "they", "site", "he", "up", "may", "what", "which", "their", "news", "out", "use", "any", "there", "see", "only", "so", "his", "when", "contact", "here", "business", "who", "web", "also", "now", "help", "get", "pm", "view", "online", "c", "e", "first", "am", "been", "would", "how", "were", "me", "s", "services", "some", "these", "click", "its", "like", "service", "x", "than", "find", "price", "date", "back", "top", "people", "had", "list", "name", "just", "over", "state", "year", "day", "into", "email", "two", "health", "n", "world", "re", "next", "used", "go", "b", "work", "last", "most", "products", "music", "buy", "data", "make", "them", "should", "product", "system", "post", "her", "city", "t", "add", "policy", "number", "such", "please", "available", "copyright", "support", "message", "after", "best", "software", "then", "jan", "good", "video", "well", "d", "where", "info", "rights", "public", "books", "high", "school", "through", "m", "each", "links", "she", "review", "years", "order", "very", "privacy", "book", "items", "company", "r", "read", "group", "need", "many", "user", "said", "de", "does", "set", "under", "general", "research", "university", "january", "mail", "full", "map", "reviews", "program", "life", "know", "games", "way", "days", "management", "p", "part", "could", "great", "united", "hotel", "real", "f", "item", 
"international", "center", "ebay", "must", "store", "travel", "comments", "made", "development", "report", "off", "member", "details", "line", "terms", "before", "hotels", "did", "send", "right", "type", "because", "local", "those", "using", "results", "office", "education", "national", "car", "design", "take", "posted", "internet", "address", "community", "within", "states", "area", "want", "phone", "dvd", "shipping", "reserved", "subject", "between", "forum", "family", "l", "long", "based", "w", "code", "show", "o", "even", "black", "check", "special", "prices", "website", "index", "being", "women", "much", "sign", "file", "link", "open", "today", "technology", "south", "case", "project", "same", "pages", "uk", "version", "section", "own", "found", "sports", "house", "related", "security", "both", "g", "county", "american", "photo", "game", "members", "power", "while", "care", "network", "down", "computer", "systems", "three", "total", "place", "end", "following", "download", "h", "him", "without", "per", "access", "think", "north", "resources", "current", "posts", "big", "media", "law", "control", "water", "history", "pictures", "size", "art", "personal", "since", "including", "guide", "shop", "directory", "board", "location", "change", "white", "text", "small", "rating", "rate", "government", "children", "during", "usa", "return", "students", "v", "shopping", "account", "times", "sites", "level", "digital", "profile", "previous", "form", "events", "love", "old", "john", "main", "call", "hours", "image", "department", "title", "description", "non", "k", "y", "insurance", "another", "why", "shall", "property", "class", "cd", "still", "money", "quality", "every", "listing", "content", "country", "private", "little", "visit", "save", "tools", "low", "reply", "customer", "december", "compare", "movies", "include", "college", "value", "article", "york", "man", "card", "jobs", "provide", "j", "food", "source", "author", "different", "press", "u", "learn", "sale", 
"around", "print", "course", "job", "canada", "process", "teen", "room", "stock", "training", "too", "credit", "point", "join", "science", "men", "categories", "advanced", "west", "sales", "look", "english", "left", "team", "estate", "box", "conditions", "select", "windows", "photos", "gay", "thread", "week", "category", "note", "live", "large", "gallery", "table", "register", "however", "june", "october", "november", "market", "library", "really", "action", "start", "series", "model", "features", "air", "industry", "plan", "human", "provided", "tv", "yes", "required", "second", "hot", "accessories", "cost", "movie", "forums", "march", "la", "september", "better", "say", "questions", "july", "yahoo", "going", "medical", "test", "friend", "come", "dec", "server", "pc", "study", "application", "cart", "staff", "articles", "san", "feedback", "again", "play", "looking", "issues", "april", "never", "users", "complete", "street", "topic", "comment", "financial", "things", "working", "against", "standard", "tax", "person", "below", "mobile", "less", "got", "blog", "party", "payment", "equipment", "login", "student", "let", "programs", "offers", "legal", "above", "recent", "park", "stores", "side", "act", "problem", "red", "give", "memory", "performance", "social", "q", "august", "quote", "language", "story", "sell", "options", "experience", "rates", "create", "key", "body", "young", "america", "important", "field", "few", "east", "paper", "single", "ii", "age", "activities", "club", "example", "girls", "additional", "password", "z", "latest", "something", "road", "gift", "question", "changes", "night", "ca", "hard", "texas", "oct", "pay", "four", "poker", "status", "browse", "issue", "range", "building", "seller", "court", "february", "always", "result", "audio", "light", "write", "war", "nov", "offer", "blue", "groups", "al", "easy", "given", "files", "event", "release", "analysis", "request", "fax", "china", "making", "picture", "needs", "possible", "might", 
"professional", "yet", "month", "major", "star", "areas", "future", "space", "committee", "hand", "sun", "cards", "problems", "london", "washington", "meeting", "rss", "become", "interest", "id", "child", "keep", "enter", "california", "share", "similar", "garden", "schools", "million", "added", "reference", "companies", "listed", "baby", "learning", "energy", "run", "delivery", "net", "popular", "term", "film", "stories", "put", "computers", "journal", "reports", "co", "try", "welcome", "central", "images", "president", "notice", "original", "head", "radio", "until", "cell", "color", "self", "council", "away", "includes", "track", "australia", "discussion", "archive", "once", "others", "entertainment", "agreement", "format", "least", "society", "months", "log", "safety", "friends", "sure", "faq", "trade", "edition", "cars", "messages", "marketing", "tell", "further", "updated", "association", "able", "having", "provides", "david", "fun", "already", "green", "studies", "close", "common", "drive", "specific", "several", "gold", "feb", "living", "sep", "collection", "called", "short", "arts", "lot", "ask", "display", "limited", "powered", "solutions", "means", "director", "daily", "beach", "past", "natural", "whether", "due", "et", "electronics", "five", "upon", "period", "planning", "database", "says", "official", "weather", "mar", "land", "average", "done", "technical", "window", "france", "pro", "region", "island", "record", "direct", "microsoft", "conference", "environment", "records", "st", "district", "calendar", "costs", "style", "url", "front", "statement", "update", "parts", "aug", "ever", "downloads", "early", "miles", "sound", "resource", "present", "applications", "either", "ago", "document", "word", "works", "material", "bill", "apr", "written", "talk", "federal", "hosting", "rules", "final", "adult", "tickets", "thing", "centre", "requirements", "via", "cheap", "kids", "finance", "true", "minutes", "else", "mark", "third", "rock", "gifts", "europe", 
"reading", "topics", "bad", "individual", "tips", "plus", "auto", "cover", "usually", "edit", "together", "videos", "percent", "fast", "function", "fact", "unit", "getting", "global", "tech", "meet", "far", "economic", "en", "player", "projects", "lyrics", "often", "subscribe", "submit", "germany", "amount", "watch", "included", "feel", "though", "bank", "risk", "thanks", "everything", "deals", "various", "words", "linux", "jul", "production", "commercial", "james", "weight", "town", "heart", "advertising", "received", "choose", "treatment", "newsletter", "archives", "points", "knowledge", "magazine", "error", "camera", "jun", "girl", "currently", "construction", "toys", "registered", "clear", "golf", "receive", "domain", "methods", "chapter", "makes", "protection", "policies", "loan", "wide", "beauty", "manager", "india", "position", "taken", "sort", "listings", "models", "michael", "known", "half", "cases", "step", "engineering", "florida", "simple", "quick", "none", "wireless", "license", "paul", "friday", "lake", "whole", "annual", "published", "later", "basic", "sony", "shows", "corporate", "google", "church", "method", "purchase", "customers", "active", "response", "practice", "hardware", "figure", "materials", "fire", "holiday", "chat", "enough", "designed", "along", "among", "death", "writing", "speed", "html", "countries", "loss", "face", "brand", "discount", "higher", "effects", "created", "remember", "standards", "oil", "bit", "yellow", "political", "increase", "advertise", "kingdom", "base", "near", "environmental", "thought", "stuff", "french", "storage", "oh", "japan", "doing", "loans", "shoes", "entry", "stay", "nature", "orders", "availability", "africa", "summary", "turn", "mean", "growth", "notes", "agency", "king", "monday", "european", "activity", "copy", "although", "drug", "pics", "western", "income", "force", "cash", "employment", "overall", "bay", "river", "commission", "ad", "package", "contents", "seen", "players", "engine", "port", 
"album", "regional", "stop", "supplies", "started", "administration", "bar", "institute", "views", "plans", "double", "dog", "build", "screen", "exchange", "types", "soon", "sponsored", "lines", "electronic", "continue", "across", "benefits", "needed", "season", "apply", "someone", "held", "ny", "anything", "printer", "condition", "effective", "believe", "organization", "effect", "asked", "eur", "mind", "sunday", "selection", "casino", "pdf", "lost", "tour", "menu", "volume", "cross", "anyone", "mortgage", "hope", "silver", "corporation", "wish", "inside", "solution", "mature", "role", "rather", "weeks", "addition", "came", "supply", "nothing", "certain", "usr", "executive", "running", "lower", "necessary", "union", "jewelry", "according", "dc", "clothing", "mon", "com", "particular", "fine", "names", "robert", "homepage", "hour", "gas", "skills", "six", "bush", "islands", "advice", "career", "military", "rental", "decision", "leave", "british", "teens", "pre", "huge", "sat", "woman", "facilities", "zip", "bid", "kind", "sellers", "middle", "move", "cable", "opportunities", "taking", "values", "division", "coming", "tuesday", "object", "lesbian", "appropriate", "machine", "logo", "length", "actually", "nice", "score", "statistics", "client", "ok", "returns", "capital", "follow", "sample", "investment", "sent", "shown", "saturday", "christmas", "england", "culture", "band", "flash", "ms", "lead", "george", "choice", "went", "starting", "registration", "fri", "thursday", "courses", "consumer", "hi", "airport", "foreign", "artist", "outside", "furniture", "levels", "channel", "letter", "mode", "phones", "ideas", "wednesday", "structure", "fund", "summer", "allow", "degree", "contract", "button", "releases", "wed", "homes", "super", "male", "matter", "custom", "virginia", "almost", "took", "located", "multiple", "asian", "distribution", "editor", "inn", "industrial", "cause", "potential", "song", "cnet", "ltd", "los", "hp", "focus", "late", "fall", "featured", "idea", 
"rooms", "female", "responsible", "inc", "communications", "win", "associated", "thomas", "primary", "cancer", "numbers", "reason", "tool", "browser", "spring", "foundation", "answer", "voice", "eg", "friendly", "schedule", "documents", "communication", "purpose", "feature", "bed", "comes", "police", "everyone", "independent", "ip", "approach", "cameras", "brown", "physical", "operating", "hill", "maps", "medicine", "deal", "hold", "ratings", "chicago", "forms", "glass", "happy", "tue", "smith", "wanted", "developed", "thank", "safe", "unique", "survey", "prior", "telephone", "sport", "ready", "feed", "animal", "sources", "mexico", "population", "pa", "regular", "secure", "navigation", "operations", "therefore", "simply", "evidence", "station", "christian", "round", "paypal", "favorite", "understand", "option", "master", "valley", "recently", "probably", "thu", "rentals", "sea", "built", "publications", "blood", "cut", "worldwide", "improve", "connection", "publisher", "hall", "larger", "anti", "networks", "earth", "parents", "nokia", "impact", "transfer", "introduction", "kitchen", "strong", "tel", "carolina", "wedding", "properties", "hospital", "ground", "overview", "ship", "accommodation", "owners", "disease", "tx", "excellent", "paid", "italy", "perfect", "hair", "opportunity", "kit", "classic", "basis", "command", "cities", "william", "express", "award", "distance", "tree", "peter", "assessment", "ensure", "thus", "wall", "ie", "involved", "el", "extra", "especially", "interface", "partners", "budget", "rated", "guides", "success", "maximum", "ma", "operation", "existing", "quite", "selected", "boy", "amazon", "patients", "restaurants", "beautiful", "warning", "wine", "locations", "horse", "vote", "forward", "flowers", "stars", "significant", "lists", "technologies", "owner", "retail", "animals", "useful", "directly", "manufacturer", "ways", "est", "son", "providing", "rule", "mac", "housing", "takes", "iii", "gmt", "bring", "catalog", "searches", "max", 
"trying", "mother", "authority", "considered", "told", "xml", "traffic", "programme", "joined", "input", "strategy", "feet", "agent", "valid", "bin", "modern", "senior", "ireland", "teaching", "door", "grand", "testing", "trial", "charge", "units", "instead", "canadian", "cool", "normal", "wrote", "enterprise", "ships", "entire", "educational", "md", "leading", "metal", "positive", "fl", "fitness", "chinese", "opinion", "mb", "asia", "football", "abstract", "uses", "output", "funds", "mr", "greater", "likely", "develop", "employees", "artists", "alternative", "processing", "responsibility", "resolution", "java", "guest", "seems", "publication", "pass", "relations", "trust", "van", "contains", "session", "multi", "photography", "republic", "fees", "components", "vacation", "century", "academic", "assistance", "completed", "skin", "graphics", "indian", "prev", "ads", "mary", "il", "expected", "ring", "grade", "dating", "pacific", "mountain", "organizations", "pop", "filter", "mailing", "vehicle", "longer", "consider", "int", "northern", "behind", "panel", "floor", "german", "buying", "match", "proposed", "default", "require", "iraq", "boys", "outdoor", "deep", "morning", "otherwise", "allows", "rest", "protein", "plant", "reported", "hit", "transportation", "mm", "pool", "mini", "politics", "partner", "disclaimer", "authors", "boards", "faculty", "parties", "fish", "membership", "mission", "eye", "string", "sense", "modified", "pack", "released", "stage", "internal", "goods", "recommended", "born", "unless", "richard", "detailed", "japanese", "race", "approved", "background", "target", "except", "character", "usb", "maintenance", "ability", "maybe", "functions", "ed", "moving", "brands", "places", "php", "pretty", "trademarks", "phentermine", "spain", "southern", "yourself", "etc", "winter", "battery", "youth", "pressure", "submitted", "boston", "debt", "keywords", "medium", "television", "interested", "core", "break", "purposes", "throughout", "sets", "dance", 
"wood", "msn", "itself", "defined", "papers", "playing", "awards", "fee", "studio", "reader", "virtual", "device", "established", "answers", "rent", "las", "remote", "dark", "programming", "external", "apple", "le", "regarding", "instructions", "min", "offered", "theory", "enjoy", "remove", "aid", "surface", "minimum", "visual", "host", "variety", "teachers", "isbn", "martin", "manual", "block", "subjects", "agents", "increased", "repair", "fair", "civil", "steel", "understanding", "songs", "fixed", "wrong", "beginning", "hands", "associates", "finally", "az", "updates", "desktop", "classes", "paris", "ohio", "gets", "sector", "capacity", "requires", "jersey", "un", "fat", "fully", "father", "electric", "saw", "instruments", "quotes", "officer", "driver", "businesses", "dead", "respect", "unknown", "specified", "restaurant", "mike", "trip", "pst", "worth", "mi", "procedures", "poor", "teacher", "eyes", "relationship", "workers", "farm", "georgia", "peace", "traditional", "campus", "tom", "showing", "creative", "coast", "benefit", "progress", "funding", "devices", "lord", "grant", "sub", "agree", "fiction", "hear", "sometimes", "watches", "careers", "beyond", "goes", "families", "led", "museum", "themselves", "fan", "transport", "interesting", "blogs", "wife", "evaluation", "accepted", "former", "implementation", "ten", "hits", "zone", "complex", "th", "cat", "galleries", "references", "die", "presented", "jack", "flat", "flow", "agencies", "literature", "respective", "parent", "spanish", "michigan", "columbia", "setting", "dr", "scale", "stand", "economy", "highest", "helpful", "monthly", "critical", "frame", "musical", "definition", "secretary", "angeles", "networking", "path", "australian", "employee", "chief", "gives", "kb", "bottom", "magazines", "packages", "detail", "francisco", "laws", "changed", "pet", "heard", "begin", "individuals", "colorado", "royal", "clean", "switch", "russian", "largest", "african", "guy", "titles", "relevant", "guidelines", 
"justice", "connect", "bible", "dev", "cup", "basket", "applied", "weekly", "vol", "installation", "described", "demand", "pp", "suite", "vegas", "na", "square", "chris", "attention", "advance", "skip", "diet", "army", "auction", "gear", "lee", "os", "difference", "allowed", "correct", "charles", "nation", "selling", "lots", "piece", "sheet", "firm", "seven", "older", "illinois", "regulations", "elements", "species", "jump", "cells", "module", "resort", "facility", "random", "pricing", "dvds", "certificate", "minister", "motion", "looks", "fashion", "directions", "visitors", "documentation", "monitor", "trading", "forest", "calls", "whose", "coverage", "couple", "giving", "chance", "vision", "ball", "ending", "clients", "actions", "listen", "discuss", "accept", "automotive", "naked", "goal", "successful", "sold", "wind", "communities", "clinical", "situation", "sciences", "markets", "lowest", "highly", "publishing", "appear", "emergency", "developing", "lives", "currency", "leather", "determine", "temperature", "palm", "announcements", "patient", "actual", "historical", "stone", "bob", "commerce", "ringtones", "perhaps", "persons", "difficult", "scientific", "satellite", "fit", "tests", "village", "accounts", "amateur", "ex", "met", "pain", "xbox", "particularly", "factors", "coffee", "www", "settings", "buyer", "cultural", "steve", "easily", "oral", "ford", "poster", "edge", "functional", "root", "au", "fi", "closed", "holidays", "ice", "pink", "zealand", "balance", "monitoring", "graduate", "replies", "shot", "nc", "architecture", "initial", "label", "thinking", "scott", "llc", "sec", "recommend", "canon", "league", "waste", "minute", "bus", "provider", "optional", "dictionary", "cold", "accounting", "manufacturing", "sections", "chair", "fishing", "effort", "phase", "fields", "bag", "fantasy", "po", "letters", "motor", "va", "professor", "context", "install", "shirt", "apparel", "generally", "continued", "foot", "mass", "crime", "count", "breast", "techniques", 
"ibm", "rd", "johnson", "sc", "quickly", "dollars", "websites", "religion", "claim", "driving", "permission", "surgery", "patch", "heat", "wild", "measures", "generation", "kansas", "miss", "chemical", "doctor", "task", "reduce", "brought", "himself", "nor", "component", "enable", "exercise", "bug", "santa", "mid", "guarantee", "leader", "diamond", "israel", "se", "processes", "soft", "servers", "alone", "meetings", "seconds", "jones", "arizona", "keyword", "interests", "flight", "congress", "fuel", "username", "walk", "produced", "italian", "paperback", "classifieds", "wait", "supported", "pocket", "saint", "rose", "freedom", "argument", "competition", "creating", "jim", "drugs", "joint", "premium", "providers", "fresh", "characters", "attorney", "upgrade", "di", "factor", "growing", "thousands", "km", "stream", "apartments", "pick", "hearing", "eastern", "auctions", "therapy", "entries", "dates", "generated", "signed", "upper", "administrative", "serious", "prime", "samsung", "limit", "began", "louis", "steps", "errors", "shops", "del", "efforts", "informed", "ga", "ac", "thoughts", "creek", "ft", "worked", "quantity", "urban", "practices", "sorted", "reporting", "essential", "myself", "tours", "platform", "load", "affiliate", "labor", "immediately", "admin", "nursing", "defense", "machines", "designated", "tags", "heavy", "covered", "recovery", "joe", "guys", "integrated", "configuration", "merchant", "comprehensive", "expert", "universal", "protect", "drop", "solid", "cds", "presentation", "languages", "became", "orange", "compliance", "vehicles", "prevent", "theme", "rich", "im", "campaign", "marine", "improvement", "vs", "guitar", "finding", "pennsylvania", "examples", "ipod", "saying", "spirit", "ar", "claims", "challenge", "motorola", "acceptance", "strategies", "mo", "seem", "affairs", "touch", "intended", "towards", "sa", "goals", "hire", "election", "suggest", "branch", "charges", "serve", "affiliates", "reasons", "magic", "mount", "smart", "talking", 
"gave", "ones", "latin", "multimedia", "xp", "avoid", "certified", "manage", "corner", "rank", "computing", "oregon", "element", "birth", "virus", "abuse", "interactive", "requests", "separate", "quarter", "procedure", "leadership", "tables", "define", "racing", "religious", "facts", "breakfast", "kong", "column", "plants", "faith", "chain", "developer", "identify", "avenue", "missing", "died", "approximately", "domestic", "sitemap", "recommendations", "moved", "houston", "reach", "comparison", "mental", "viewed", "moment", "extended", "sequence", "inch", "attack", "sorry", "centers", "opening", "damage", "lab", "reserve", "recipes", "cvs", "gamma", "plastic", "produce", "snow", "placed", "truth", "counter", "failure", "follows", "eu", "weekend", "dollar", "camp", "ontario", "automatically", "des", "minnesota", "films", "bridge", "native", "fill", "williams", "movement", "printing", "baseball", "owned", "approval", "draft", "chart", "played", "contacts", "cc", "jesus", "readers", "clubs", "lcd", "wa", "jackson", "equal", "adventure", "matching", "offering", "shirts", "profit", "leaders", "posters", "institutions", "assistant", "variable", "ave", "dj", "advertisement", "expect", "parking", "headlines", "yesterday", "compared", "determined", "wholesale", "workshop", "russia", "gone", "codes", "kinds", "extension", "seattle", "statements", "golden", "completely", "teams", "fort", "cm", "wi", "lighting", "senate", "forces", "funny", "brother", "gene", "turned", "portable", "tried", "electrical", "applicable", "disc", "returned", "pattern", "ct", "boat", "named", "theatre", "laser", "earlier", "manufacturers", "sponsor", "classical", "icon", "warranty", "dedicated", "indiana", "direction", "harry", "basketball", "objects", "ends", "delete", "evening", "assembly", "nuclear", "taxes", "mouse", "signal", "criminal", "issued", "brain", "sexual", "wisconsin", "powerful", "dream", "obtained", "false", "da", "cast", "flower", "felt", "personnel", "passed", "supplied", 
"identified", "falls", "pic", "soul", "aids", "opinions", "promote", "stated", "stats", "hawaii", "professionals", "appears", "carry", "flag", "decided", "nj", "covers", "hr", "em", "advantage", "hello", "designs", "maintain", "tourism", "priority", "newsletters", "adults", "clips", "savings", "iv", "graphic", "atom", "payments", "rw", "estimated", "binding", "brief", "ended", "winning", "eight", "anonymous", "iron", "straight", "script", "served", "wants", "miscellaneous", "prepared", "void", "dining", "alert", "integration", "atlanta", "dakota", "tag", "interview", "mix", "framework", "disk", "installed", "queen", "vhs", "credits", "clearly", "fix", "handle", "sweet", "desk", "criteria", "pubmed", "dave", "massachusetts", "diego", "hong", "vice", "associate", "ne", "truck", "behavior", "enlarge", "ray", "frequently", "revenue", "measure", "changing", "votes", "du", "duty", "looked", "discussions", "bear", "gain", "festival", "laboratory", "ocean", "flights", "experts", "signs", "lack", "depth", "iowa", "whatever", "logged", "laptop", "vintage", "train", "exactly", "dry", "explore", "maryland", "spa", "concept", "nearly", "eligible", "checkout", "reality", "forgot", "handling", "origin", "knew", "gaming", "feeds", "billion", "destination", "scotland", "faster", "intelligence", "dallas", "bought", "con", "ups", "nations", "route", "followed", "specifications", "broken", "tripadvisor", "frank", "alaska", "zoom", "blow", "battle", "residential", "anime", "speak", "decisions", "industries", "protocol", "query", "clip", "partnership", "editorial", "nt", "expression", "es", "equity", "provisions", "speech", "wire", "principles", "suggestions", "rural", "shared", "sounds", "replacement", "tape", "strategic", "judge", "spam", "economics", "acid", "bytes", "cent", "forced", "compatible", "fight", "apartment", "height", "null", "zero", "speaker", "filed", "gb", "netherlands", "obtain", "bc", "consulting", "recreation", "offices", "designer", "remain", "managed", "pr", 
"failed", "marriage", "roll", "korea", "banks", "fr", "participants", "secret", "bath", "aa", "kelly", "leads", "negative", "austin", "favorites", "toronto", "theater", "springs", "missouri", "andrew", "var", "perform", "healthy", "translation", "estimates", "font", "assets", "injury", "mt", "joseph", "ministry", "drivers", "lawyer", "figures", "married", "protected", "proposal", "sharing", "philadelphia", "portal", "waiting", "birthday", "beta", "fail", "gratis", "banking", "officials", "brian", "toward", "won", "slightly", "assist", "conduct", "contained", "lingerie", "legislation", "calling", "parameters", "jazz", "serving", "bags", "profiles", "miami", "comics", "matters", "houses", "doc", "postal", "relationships", "tennessee", "wear", "controls", "breaking", "combined", "ultimate", "wales", "representative", "frequency", "introduced", "minor", "finish", "departments", "residents", "noted", "displayed", "mom", "reduced", "physics", "rare", "spent", "performed", "extreme", "samples", "davis", "daniel", "bars", "reviewed", "row", "oz", "forecast", "removed", "helps", "singles", "administrator", "cycle", "amounts", "contain", "accuracy", "dual", "rise", "usd", "sleep", "mg", "bird", "pharmacy", "brazil", "creation", "static", "scene", "hunter", "addresses", "lady", "crystal", "famous", "writer", "chairman", "violence", "fans", "oklahoma", "speakers", "drink", "academy", "dynamic", "gender", "eat", "permanent", "agriculture", "dell", "cleaning", "constitutes", "portfolio", "practical", "delivered", "collectibles", "infrastructure", "exclusive", "seat", "concerns", "vendor", "originally", "intel", "utilities", "philosophy", "regulation", "officers", "reduction", "aim", "bids", "referred", "supports", "nutrition", "recording", "regions", "junior", "toll", "les", "cape", "ann", "rings", "meaning", "tip", "secondary", "wonderful", "mine", "ladies", "henry", "ticket", "announced", "guess", "agreed", "prevention", "whom", "ski", "soccer", "math", "import", "posting", 
"presence", "instant", "mentioned", "automatic", "healthcare", "viewing", "maintained", "ch", "increasing", "majority", "connected", "christ", "dan", "dogs", "sd", "directors", "aspects", "austria", "ahead", "moon", "participation", "scheme", "utility", "preview", "fly", "manner", "matrix", "containing", "combination", "devel", "amendment", "despite", "strength", "guaranteed", "turkey", "libraries", "proper", "distributed", "degrees", "singapore", "enterprises", "delta", "fear", "seeking", "inches", "phoenix", "rs", "convention", "shares", "principal", "daughter", "standing", "comfort", "colors", "wars", "cisco", "ordering", "kept", "alpha", "appeal", "cruise", "bonus", "certification", "previously", "hey", "bookmark", "buildings", "specials", "beat", "disney", "household", "batteries", "adobe", "smoking", "bbc", "becomes", "drives", "arms", "alabama", "tea", "improved", "trees", "avg", "achieve", "positions", "dress", "subscription", "dealer", "contemporary", "sky", "utah", "nearby", "rom", "carried", "happen", "exposure", "panasonic", "hide", "permalink", "signature", "gambling", "refer", "miller", "provision", "outdoors", "clothes", "caused", "luxury", "babes", "frames", "certainly", "indeed", "newspaper", "toy", "circuit", "layer", "printed", "slow", "removal", "easier", "src", "liability", "trademark", "hip", "printers", "faqs", "nine", "adding", "kentucky", "mostly", "eric", "spot", "taylor", "trackback", "prints", "spend", "factory", "interior", "revised", "grow", "americans", "optical", "promotion", "relative", "amazing", "clock", "dot", "hiv", "identity", "suites", "conversion", "feeling", "hidden", "reasonable", "victoria", "serial", "relief", "revision", "broadband", "influence", "ratio", "pda", "importance", "rain", "onto", "dsl", "planet", "webmaster", "copies", "recipe", "zum", "permit", "seeing", "proof", "dna", "diff", "tennis", "bass", "prescription", "bedroom", "empty", "instance", "hole", "pets", "ride", "licensed", "orlando", "specifically", 
"tim", "bureau", "maine", "sql", "represent", "conservation", "pair", "ideal", "specs", "recorded", "don", "pieces", "finished", "parks", "dinner", "lawyers", "sydney", "stress", "cream", "ss", "runs", "trends", "yeah", "discover", "ap", "patterns", "boxes", "louisiana", "hills", "javascript", "fourth", "nm", "advisor", "mn", "marketplace", "nd", "evil", "aware", "wilson", "shape", "evolution", "irish", "certificates", "objectives", "stations", "suggested", "gps", "op", "remains", "acc", "greatest", "firms", "concerned", "euro", "operator", "structures", "generic", "encyclopedia", "usage", "cap", "ink", "charts", "continuing", "mixed", "census", "interracial", "peak", "tn", "competitive", "exist", "wheel", "transit", "suppliers", "salt", "compact", "poetry", "lights", "tracking", "angel", "bell", "keeping", "preparation", "attempt", "receiving", "matches", "accordance", "width", "noise", "engines", "forget", "array", "discussed", "accurate", "stephen", "elizabeth", "climate", "reservations", "pin", "playstation", "alcohol", "greek", "instruction", "managing", "annotation", "sister", "raw", "differences", "walking", "explain", "smaller", "newest", "establish", "gnu", "happened", "expressed", "jeff", "extent", "sharp", "lesbians", "ben", "lane", "paragraph", "kill", "mathematics", "aol", "compensation", "ce", "export", "managers", "aircraft", "modules", "sweden", "conflict", "conducted", "versions", "employer", "occur", "percentage", "knows", "mississippi", "describe", "concern", "backup", "requested", "citizens", "connecticut", "heritage", "personals", "immediate", "holding", "trouble", "spread", "coach", "kevin", "agricultural", "expand", "supporting", "audience", "assigned", "jordan", "collections", "ages", "participate", "plug", "specialist", "cook", "affect", "virgin", "experienced", "investigation", "raised", "hat", "institution", "directed", "dealers", "searching", "sporting", "helping", "perl", "affected", "lib", "bike", "totally", "plate", "expenses", 
"indicate", "blonde", "ab", "proceedings", "transmission", "anderson", "utc", "characteristics", "der", "lose", "organic", "seek", "experiences", "albums", "cheats", "extremely", "verzeichnis", "contracts", "guests", "hosted", "diseases", "concerning", "developers", "equivalent", "chemistry", "tony", "neighborhood", "nevada", "kits", "thailand", "variables", "agenda", "anyway", "continues", "tracks", "advisory", "cam", "curriculum", "logic", "template", "prince", "circle", "soil", "grants", "anywhere", "psychology", "responses", "atlantic", "wet", "circumstances", "edward", "investor", "identification", "ram", "leaving", "wildlife", "appliances", "matt", "elementary", "cooking", "speaking", "sponsors", "fox", "unlimited", "respond", "sizes", "plain", "exit", "entered", "iran", "arm", "keys", "launch", "wave", "checking", "costa", "belgium", "printable", "holy", "acts", "guidance", "mesh", "trail", "enforcement", "symbol", "crafts", "highway", "buddy", "hardcover", "observed", "dean", "setup", "poll", "booking", "glossary", "fiscal", "celebrity", "styles", "denver", "unix", "filled", "bond", "channels", "ericsson", "appendix", "notify", "blues", "chocolate", "pub", "portion", "scope", "hampshire", "supplier", "cables", "cotton", "bluetooth", "controlled", "requirement", "authorities", "biology", "dental", "killed", "border", "ancient", "debate", "representatives", "starts", "pregnancy", "causes", "arkansas", "biography", "leisure", "attractions", "learned", "transactions", "notebook", "explorer", "historic", "attached", "opened", "tm", "husband", "disabled", "authorized", "crazy", "upcoming", "britain", "concert", "retirement", "scores", "financing", "efficiency", "sp", "comedy", "adopted", "efficient", "weblog", "linear", "commitment", "specialty", "bears", "jean", "hop", "carrier", "edited", "constant", "visa", "mouth", "jewish", "meter", "linked", "portland", "interviews", "concepts", "nh", "gun", "reflect", "pure", "deliver", "wonder", "lessons", "fruit", 
"begins", "qualified", "reform", "lens", "alerts", "treated", "discovery", "draw", "mysql", "classified", "relating", "assume", "confidence", "alliance", "fm", "confirm", "warm", "neither", "lewis", "howard", "offline", "leaves", "engineer", "lifestyle", "consistent", "replace", "clearance", "connections", "inventory", "converter", "organisation", "babe", "checks", "reached", "becoming", "safari", "objective", "indicated", "sugar", "crew", "legs", "sam", "stick", "securities", "allen", "pdt", "relation", "enabled", "genre", "slide", "montana", "volunteer", "tested", "rear", "democratic", "enhance", "switzerland", "exact", "bound", "parameter", "adapter", "processor", "node", "formal", "dimensions", "contribute", "lock", "hockey", "storm", "micro", "colleges", "laptops", "mile", "showed", "challenges", "editors", "mens", "threads", "bowl", "supreme", "brothers", "recognition", "presents", "ref", "tank", "submission", "dolls", "estimate", "encourage", "navy", "kid", "regulatory", "inspection", "consumers", "cancel", "limits", "territory", "transaction", "manchester", "weapons", "paint", "delay", "pilot", "outlet", "contributions", "continuous", "db", "czech", "resulting", "cambridge", "initiative", "novel", "pan", "execution", "disability", "increases", "ultra", "winner", "idaho", "contractor", "ph", "episode", "examination", "potter", "dish", "plays", "bulletin", "ia", "pt", "indicates", "modify", "oxford", "adam", "truly", "epinions", "painting", "committed", "extensive", "affordable", "universe", "candidate", "databases", "patent", "slot", "psp", "outstanding", "ha", "eating", "perspective", "planned", "watching", "lodge", "messenger", "mirror", "tournament", "consideration", "ds", "discounts", "sterling", "sessions", "kernel", "stocks", "buyers", "journals", "gray", "catalogue", "ea", "jennifer", "antonio", "charged", "broad", "taiwan", "und", "chosen", "demo", "greece", "lg", "swiss", "sarah", "clark", "hate", "terminal", "publishers", "nights", "behalf", 
"caribbean", "liquid", "rice", "nebraska", "loop", "salary", "reservation", "foods", "gourmet", "guard", "properly", "orleans", "saving", "nfl", "remaining", "empire", "resume", "twenty", "newly", "raise", "prepare", "avatar", "gary", "depending", "illegal", "expansion", "vary", "hundreds", "rome", "arab", "lincoln", "helped", "premier", "tomorrow", "purchased", "milk", "decide", "consent", "drama", "visiting", "performing", "downtown", "keyboard", "contest", "collected", "nw", "bands", "boot", "suitable", "ff", "absolutely", "millions", "lunch", "audit", "push", "chamber", "guinea", "findings", "muscle", "featuring", "iso", "implement", "clicking", "scheduled", "polls", "typical", "tower", "yours", "sum", "misc", "calculator", "significantly", "chicken", "temporary", "attend", "shower", "alan", "sending", "jason", "tonight", "dear", "sufficient", "holdem", "shell", "province", "catholic", "oak", "vat", "awareness", "vancouver", "governor", "beer", "seemed", "contribution", "measurement", "swimming", "spyware", "formula", "constitution", "packaging", "solar", "jose", "catch", "jane", "pakistan", "ps", "reliable", "consultation", "northwest", "sir", "doubt", "earn", "finder", "unable", "periods", "classroom", "tasks", "democracy", "attacks", "kim", "wallpaper", "merchandise", "const", "resistance", "doors", "symptoms", "resorts", "biggest", "memorial", "visitor", "twin", "forth", "insert", "baltimore", "gateway", "ky", "dont", "alumni", "drawing", "candidates", "charlotte", "ordered", "biological", "fighting", "transition", "happens", "preferences", "spy", "romance", "instrument", "bruce", "split", "themes", "powers", "heaven", "br", "bits", "pregnant", "twice", "classification", "focused", "egypt", "physician", "hollywood", "bargain", "wikipedia", "cellular", "norway", "vermont", "asking", "blocks", "normally", "lo", "spiritual", "hunting", "diabetes", "suit", "ml", "shift", "chip", "res", "sit", "bodies", "photographs", "cutting", "wow", "simon", "writers", 
"marks", "flexible", "loved", "mapping", "numerous", "relatively", "birds", "satisfaction", "represents", "char", "indexed", "pittsburgh", "superior", "preferred", "saved", "paying", "cartoon", "shots", "intellectual", "moore", "granted", "choices", "carbon", "spending", "comfortable", "magnetic", "interaction", "listening", "effectively", "registry", "crisis", "outlook", "massive", "denmark", "employed", "bright", "treat", "header", "cs", "poverty", "formed", "piano", "echo", "que", "grid", "sheets", "patrick", "experimental", "puerto", "revolution", "consolidation", "displays", "plasma", "allowing", "earnings", "voip", "mystery", "landscape", "dependent", "mechanical", "journey", "delaware", "bidding", "consultants", "risks", "banner", "applicant", "charter", "fig", "barbara", "cooperation", "counties", "acquisition", "ports", "implemented", "sf", "directories", "recognized", "dreams", "blogger", "notification", "kg", "licensing", "stands", "teach", "occurred", "textbooks", "rapid", "pull", "hairy", "diversity", "cleveland", "ut", "reverse", "deposit", "seminar", "investments", "latina", "nasa", "wheels", "sexcam", "specify", "accessibility", "dutch", "sensitive", "templates", "formats", "tab", "depends", "boots", "holds", "router", "concrete", "si", "editing", "poland", "folder", "womens", "css", "completion", "upload", "pulse", "universities", "technique", "contractors", "milfhunter", "voting", "courts", "notices", "subscriptions", "calculate", "mc", "detroit", "alexander", "broadcast", "converted", "metro", "toshiba", "anniversary", "improvements", "strip", "specification", "pearl", "accident", "nick", "accessible", "accessory", "resident", "plot", "qty", "possibly", "airline", "typically", "representation", "regard", "pump", "exists", "arrangements", "smooth", "conferences", "uniprotkb", "strike", "consumption", "birmingham", "flashing", "lp", "narrow", "afternoon", "threat", "surveys", "sitting", "putting", "consultant", "controller", "ownership", 
"committees", "legislative", "researchers", "vietnam", "trailer", "anne", "castle", "gardens", "missed", "malaysia", "unsubscribe", "antique", "labels", "willing", "bio", "molecular", "acting", "heads", "stored", "exam", "logos", "residence", "attorneys", "milfs", "antiques", "density", "hundred", "ryan", "operators", "strange", "sustainable", "philippines", "statistical", "beds", "mention", "innovation", "pcs", "employers", "grey", "parallel", "honda", "amended", "operate", "bills", "bold", "bathroom", "stable", "opera", "definitions", "von", "doctors", "lesson", "cinema", "asset", "ag", "scan", "elections", "drinking", "reaction", "blank", "enhanced", "entitled", "severe", "generate", "stainless", "newspapers", "hospitals", "vi", "deluxe", "humor", "aged", "monitors", "exception", "lived", "duration", "bulk", "successfully", "indonesia", "pursuant", "sci", "fabric", "edt", "visits", "primarily", "tight", "domains", "capabilities", "pmid", "contrast", "recommendation", "flying", "recruitment", "sin", "berlin", "cute", "organized", "ba", "para", "siemens", "adoption", "improving", "cr", "expensive", "meant", "capture", "pounds", "buffalo", "organisations", "plane", "pg", "explained", "seed", "programmes", "desire", "expertise", "mechanism", "camping", "ee", "jewellery", "meets", "welfare", "peer", "caught", "eventually", "marked", "driven", "measured", "medline", "bottle", "agreements", "considering", "innovative", "marshall", "massage", "rubber", "conclusion", "closing", "tampa", "thousand", "meat", "legend", "grace", "susan", "ing", "ks", "adams", "python", "monster", "alex", "bang", "villa", "bone", "columns", "disorders", "bugs", "collaboration", "hamilton", "detection", "ftp", "cookies", "inner", "formation", "tutorial", "med", "engineers", "entity", "cruises", "gate", "holder", "proposals", "moderator", "sw", "tutorials", "settlement", "portugal", "lawrence", "roman", "duties", "valuable", "tone", "collectables", "ethics", "forever", "dragon", "busy", 
"captain", "fantastic", "imagine", "brings", "heating", "leg", "neck", "hd", "wing", "governments", "purchasing", "scripts", "abc", "stereo", "appointed", "taste", "dealing", "commit", "tiny", "operational", "rail", "airlines", "liberal", "livecam", "jay", "trips", "gap", "sides", "tube", "turns", "corresponding", "descriptions", "cache", "belt", "jacket", "determination", "animation", "oracle", "er", "matthew", "lease", "productions", "aviation", "hobbies", "proud", "excess", "disaster", "console", "commands", "jr", "telecommunications", "instructor", "giant", "achieved", "injuries", "shipped", "seats", "approaches", "biz", "alarm", "voltage", "anthony", "nintendo", "usual", "loading", "stamps", "appeared", "franklin", "angle", "rob", "vinyl", "highlights", "mining", "designers", "melbourne", "ongoing", "worst", "imaging", "betting", "scientists", "liberty", "wyoming", "blackjack", "argentina", "era", "convert", "possibility", "analyst", "commissioner", "dangerous", "garage", "exciting", "reliability", "thongs", "gcc", "unfortunately", "respectively", "volunteers", "attachment", "ringtone", "finland", "morgan", "derived", "pleasure", "honor", "asp", "oriented", "eagle", "desktops", "pants", "columbus", "nurse", "prayer", "appointment", "workshops", "hurricane", "quiet", "luck", "postage", "producer", "represented", "mortgages", "dial", "responsibilities", "cheese", "comic", "carefully", "jet", "productivity", "investors", "crown", "par", "underground", "diagnosis", "maker", "crack", "principle", "picks", "vacations", "gang", "semester", "calculated", "fetish", "applies", "casinos", "appearance", "smoke", "apache", "filters", "incorporated", "nv", "craft", "cake", "notebooks", "apart", "fellow", "blind", "lounge", "mad", "algorithm", "semi", "coins", "andy", "gross", "strongly", "cafe", "valentine", "hilton", "ken", "proteins", "horror", "su", "exp", "familiar", "capable", "douglas", "debian", "till", "involving", "pen", "investing", "christopher", "admission", 
"epson", "shoe", "elected", "carrying", "victory", "sand", "madison", "terrorism", "joy", "editions", "cpu", "mainly", "ethnic", "ran", "parliament", "actor", "finds", "seal", "situations", "fifth", "allocated", "citizen", "vertical", "corrections", "structural", "municipal", "describes", "prize", "sr", "occurs", "jon", "absolute", "disabilities", "consists", "anytime", "substance", "prohibited", "addressed", "lies", "pipe", "soldiers", "nr", "guardian", "lecture", "simulation", "layout", "initiatives", "ill", "concentration", "classics", "lbs", "lay", "interpretation", "horses", "lol", "dirty", "deck", "wayne", "donate", "taught", "bankruptcy", "mp", "worker", "optimization", "alive", "temple", "substances", "prove", "discovered", "wings", "breaks", "genetic", "restrictions", "participating", "waters", "promise", "thin", "exhibition", "prefer", "ridge", "cabinet", "modem", "harris", "mph", "bringing", "sick", "dose", "evaluate", "tiffany", "tropical", "collect", "bet", "composition", "toyota", "streets", "nationwide", "vector", "definitely", "shaved", "turning", "buffer", "purple", "existence", "commentary", "larry", "limousines", "developments", "def", "immigration", "destinations", "lets", "mutual", "pipeline", "necessarily", "syntax", "li", "attribute", "prison", "skill", "chairs", "nl", "everyday", "apparently", "surrounding", "mountains", "moves", "popularity", "inquiry", "ethernet", "checked", "exhibit", "throw", "trend", "sierra", "visible", "cats", "desert", "postposted", "ya", "oldest", "rhode", "nba", "coordinator", "obviously", "mercury", "steven", "handbook", "greg", "navigate", "worse", "summit", "victims", "epa", "spaces", "fundamental", "burning", "escape", "coupons", "somewhat", "receiver", "substantial", "tr", "progressive", "cialis", "bb", "boats", "glance", "scottish", "championship", "arcade", "richmond", "sacramento", "impossible", "ron", "russell", "tells", "obvious", "fiber", "depression", "graph", "covering", "platinum", "judgment", 
"bedrooms", "talks", "filing", "foster", "modeling", "passing", "awarded", "testimonials", "trials", "tissue", "nz", "memorabilia", "clinton", "masters", "bonds", "cartridge", "alberta", "explanation", "folk", "org", "commons", "cincinnati", "subsection", "fraud", "electricity", "permitted", "spectrum", "arrival", "okay", "pottery", "emphasis", "roger", "aspect", "workplace", "awesome", "mexican", "confirmed", "counts", "priced", "wallpapers", "hist", "crash", "lift", "desired", "inter", "closer", "assumes", "heights", "shadow", "riding", "infection", "firefox", "lisa", "expense", "grove", "eligibility", "venture", "clinic", "korean", "healing", "princess", "mall", "entering", "packet", "spray", "studios", "involvement", "dad", "buttons", "placement", "observations", "vbulletin", "funded", "thompson", "winners", "extend", "roads", "subsequent", "pat", "dublin", "rolling", "fell", "motorcycle", "yard", "disclosure", "establishment", "memories", "nelson", "te", "arrived", "creates", "faces", "tourist", "av", "mayor", "murder", "sean", "adequate", "senator", "yield", "presentations", "grades", "cartoons", "pour", "digest", "reg", "lodging", "tion", "dust", "hence", "wiki", "entirely", "replaced", "radar", "rescue", "undergraduate", "losses", "combat", "reducing", "stopped", "occupation", "lakes", "donations", "associations", "citysearch", "closely", "radiation", "diary", "seriously", "kings", "shooting", "kent", "adds", "nsw", "ear", "flags", "pci", "baker", "launched", "elsewhere", "pollution", "conservative", "guestbook", "shock", "effectiveness", "walls", "abroad", "ebony", "tie", "ward", "drawn", "arthur", "ian", "visited", "roof", "walker", "demonstrate", "atmosphere", "suggests", "kiss", "beast", "ra", "operated", "experiment", "targets", "overseas", "purchases", "dodge", "counsel", "federation", "pizza", "invited", "yards", "assignment", "chemicals", "gordon", "mod", "farmers", "rc", "queries", "bmw", "rush", "ukraine", "absence", "nearest", "cluster", 
"vendors", "mpeg", "whereas", "yoga", "serves", "woods", "surprise", "lamp", "rico", "partial", "shoppers", "phil", "everybody", "couples", "nashville", "ranking", "jokes", "cst", "http", "ceo", "simpson", "twiki", "sublime", "counseling", "palace", "acceptable", "satisfied", "glad", "wins", "measurements", "verify", "globe", "trusted", "copper", "milwaukee", "rack", "medication", "warehouse", "shareware", "ec", "rep", "dicke", "kerry", "receipt", "supposed", "ordinary", "nobody", "ghost", "violation", "configure", "stability", "mit", "applying", "southwest", "boss", "pride", "institutional", "expectations", "independence", "knowing", "reporter", "metabolism", "keith", "champion", "cloudy", "linda", "ross", "personally", "chile", "anna", "plenty", "solo", "sentence", "throat", "ignore", "maria", "uniform", "excellence", "wealth", "tall", "rm", "somewhere", "vacuum", "dancing", "attributes", "recognize", "brass", "writes", "plaza", "pdas", "outcomes", "survival", "quest", "publish", "sri", "screening", "toe", "thumbnail", "trans", "jonathan", "whenever", "nova", "lifetime", "api", "pioneer", "booty", "forgotten", "acrobat", "plates", "acres", "venue", "athletic", "thermal", "essays", "vital", "telling", "fairly", "coastal", "config", "cf", "charity", "intelligent", "edinburgh", "vt", "excel", "modes", "obligation", "campbell", "wake", "stupid", "harbor", "hungary", "traveler", "urw", "segment", "realize", "regardless", "lan", "enemy", "puzzle", "rising", "aluminum", "wells", "wishlist", "opens", "insight", "sms", "restricted", "republican", "secrets", "lucky", "latter", "merchants", "thick", "trailers", "repeat", "syndrome", "philips", "attendance", "penalty", "drum", "glasses", "enables", "nec", "iraqi", "builder", "vista", "jessica", "chips", "terry", "flood", "foto", "ease", "arguments", "amsterdam", "arena", "adventures", "pupils", "stewart", "announcement", "tabs", "outcome", "appreciate", "expanded", "casual", "grown", "polish", "lovely", "extras", "gm", 
"centres", "jerry", "clause", "smile", "lands", "ri", "troops", "indoor", "bulgaria", "armed", "broker", "charger", "regularly", "believed", "pine", "cooling", "tend", "gulf", "rt", "rick", "trucks", "cp", "mechanisms", "divorce", "laura", "shopper", "tokyo", "partly", "nikon", "customize", "tradition", "candy", "pills", "tiger", "donald", "folks", "sensor", "exposed", "telecom", "hunt", "angels", "deputy", "indicators", "sealed", "thai", "emissions", "physicians", "loaded", "fred", "complaint", "scenes", "experiments", "afghanistan", "dd", "boost", "spanking", "scholarship", "governance", "mill", "founded", "supplements", "chronic", "icons", "moral", "den", "catering", "aud", "finger", "keeps", "pound", "locate", "camcorder", "pl", "trained", "burn", "implementing", "roses", "labs", "ourselves", "bread", "tobacco", "wooden", "motors", "tough", "roberts", "incident", "gonna", "dynamics", "lie", "crm", "rf", "conversation", "decrease", "cumshots", "chest", "pension", "billy", "revenues", "emerging", "worship", "capability", "ak", "fe", "craig", "herself", "producing", "churches", "precision", "damages", "reserves", "contributed", "solve", "shorts", "reproduction", "minority", "td", "diverse", "amp", "ingredients", "sb", "ah", "johnny", "sole", "franchise", "recorder", "complaints", "facing", "sm", "nancy", "promotions", "tones", "passion", "rehabilitation", "maintaining", "sight", "laid", "clay", "defence", "patches", "weak", "refund", "usc", "towns", "environments", "trembl", "divided", "blvd", "reception", "amd", "wise", "emails", "cyprus", "wv", "odds", "correctly", "insider", "seminars", "consequences", "makers", "hearts", "geography", "appearing", "integrity", "worry", "ns", "discrimination", "eve", "carter", "legacy", "marc", "pleased", "danger", "vitamin", "widely", "processed", "phrase", "genuine", "raising", "implications", "functionality", "paradise", "hybrid", "reads", "roles", "intermediate", "emotional", "sons", "leaf", "pad", "glory", "platforms", 
"ja", "bigger", "billing", "diesel", "versus", "combine", "overnight", "geographic", "exceed", "bs", "rod", "saudi", "fault", "cuba", "hrs", "preliminary", "districts", "introduce", "silk", "promotional", "kate", "chevrolet", "babies", "bi", "karen", "compiled", "romantic", "revealed", "specialists", "generator", "albert", "examine", "jimmy", "graham", "suspension", "bristol", "margaret", "compaq", "sad", "correction", "wolf", "slowly", "authentication", "communicate", "rugby", "supplement", "showtimes", "cal", "portions", "infant", "promoting", "sectors", "samuel", "fluid", "grounds", "fits", "kick", "regards", "meal", "ta", "hurt", "machinery", "bandwidth", "unlike", "equation", "baskets", "probability", "pot", "dimension", "wright", "img", "barry", "proven", "schedules", "admissions", "cached", "warren", "slip", "studied", "reviewer", "involves", "quarterly", "rpm", "profits", "devil", "grass", "comply", "marie", "florist", "illustrated", "cherry", "continental", "alternate", "deutsch", "achievement", "limitations", "kenya", "webcam", "cuts", "funeral", "nutten", "earrings", "enjoyed", "automated", "chapters", "pee", "charlie", "quebec", "passenger", "convenient", "dennis", "mars", "francis", "tvs", "sized", "manga", "noticed", "socket", "silent", "literary", "egg", "mhz", "signals", "caps", "orientation", "pill", "theft", "childhood", "swing", "symbols", "lat", "meta", "humans", "analog", "facial", "choosing", "talent", "dated", "flexibility", "seeker", "wisdom", "shoot", "boundary", "mint", "packard", "offset", "payday", "philip", "elite", "gi", "spin", "holders", "believes", "swedish", "poems", "deadline", "jurisdiction", "robot", "displaying", "witness", "collins", "equipped", "stages", "encouraged", "sur", "winds", "powder", "broadway", "acquired", "assess", "wash", "cartridges", "stones", "entrance", "gnome", "roots", "declaration", "losing", "attempts", "gadgets", "noble", "glasgow", "automation", "impacts", "rev", "gospel", "advantages", "shore", 
"loves", "induced", "ll", "knight", "preparing", "loose", "aims", "recipient", "linking", "extensions", "appeals", "cl", "earned", "illness", "islamic", "athletics", "southeast", "ieee", "ho", "alternatives", "pending", "parker", "determining", "lebanon", "corp", "personalized", "kennedy", "gt", "sh", "conditioning", "teenage", "soap", "ae", "triple", "cooper", "nyc", "vincent", "jam", "secured", "unusual", "answered", "partnerships", "destruction", "slots", "increasingly", "migration", "disorder", "routine", "toolbar", "basically", "rocks", "conventional", "titans", "applicants", "wearing", "axis", "sought", "genes", "mounted", "habitat", "firewall", "median", "guns", "scanner", "herein", "occupational", "animated", "judicial", "rio", "hs", "adjustment", "hero", "integer", "treatments", "bachelor", "attitude", "camcorders", "engaged", "falling", "basics", "montreal", "carpet", "rv", "struct", "lenses", "binary", "genetics", "attended", "difficulty", "punk", "collective", "coalition", "pi", "dropped", "enrollment", "duke", "walter", "ai", "pace", "besides", "wage", "producers", "ot", "collector", "arc", "hosts", "interfaces", "advertisers", "moments", "atlas", "strings", "dawn", "representing", "observation", "feels", "torture", "carl", "deleted", "coat", "mitchell", "mrs", "rica", "restoration", "convenience", "returning", "ralph", "opposition", "container", "yr", "defendant", "warner", "confirmation", "app", "embedded", "inkjet", "supervisor", "wizard", "corps", "actors", "liver", "peripherals", "liable", "brochure", "morris", "bestsellers", "petition", "eminem", "recall", "antenna", "picked", "assumed", "departure", "minneapolis", "belief", "killing", "bikini", "memphis", "shoulder", "decor", "lookup", "texts", "harvard", "brokers", "roy", "ion", "diameter", "ottawa", "doll", "ic", "podcast", "seasons", "peru", "interactions", "refine", "bidder", "singer", "evans", "herald", "literacy", "fails", "aging", "nike", "intervention", "fed", "plugin", "attraction", 
"diving", "invite", "modification", "alice", "latinas", "suppose", "customized", "reed", "involve", "moderate", "terror", "younger", "thirty", "mice", "opposite", "understood", "rapidly", "dealtime", "ban", "temp", "intro", "mercedes", "zus", "assurance", "clerk", "happening", "vast", "mills", "outline", "amendments", "tramadol", "holland", "receives", "jeans", "metropolitan", "compilation", "verification", "fonts", "ent", "odd", "wrap", "refers", "mood", "favor", "veterans", "quiz", "mx", "sigma", "gr", "attractive", "xhtml", "occasion", "recordings", "jefferson", "victim", "demands", "sleeping", "careful", "ext", "beam", "gardening", "obligations", "arrive", "orchestra", "sunset", "tracked", "moreover", "minimal", "polyphonic", "lottery", "tops", "framed", "aside", "outsourcing", "licence", "adjustable", "allocation", "michelle", "essay", "discipline", "amy", "ts", "demonstrated", "dialogue", "identifying", "alphabetical", "camps", "declared", "dispatched", "aaron", "handheld", "trace", "disposal", "shut", "florists", "packs", "ge", "installing", "switches", "romania", "voluntary", "ncaa", "thou", "consult", "phd", "greatly", "blogging", "mask", "cycling", "midnight", "ng", "commonly", "pe", "photographer", "inform", "turkish", "coal", "cry", "messaging", "pentium", "quantum", "murray", "intent", "tt", "zoo", "largely", "pleasant", "announce", "constructed", "additions", "requiring", "spoke", "aka", "arrow", "engagement", "sampling", "rough", "weird", "tee", "refinance", "lion", "inspired", "holes", "weddings", "blade", "suddenly", "oxygen", "cookie", "meals", "canyon", "goto", "meters", "merely", "calendars", "arrangement", "conclusions", "passes", "bibliography", "pointer", "compatibility", "stretch", "durham", "furthermore", "permits", "cooperative", "muslim", "xl", "neil", "sleeve", "netscape", "cleaner", "cricket", "beef", "feeding", "stroke", "township", "rankings", "measuring", "cad", "hats", "robin", "robinson", "jacksonville", "strap", "headquarters", 
"sharon", "crowd", "tcp", "transfers", "surf", "olympic", "transformation", "remained", "attachments", "dv", "dir", "entities", "customs", "administrators", "personality", "rainbow", "hook", "roulette", "decline", "gloves", "israeli", "medicare", "cord", "skiing", "cloud", "facilitate", "subscriber", "valve", "val", "hewlett", "explains", "proceed", "flickr", "feelings", "knife", "jamaica", "priorities", "shelf", "bookstore", "timing", "liked", "parenting", "adopt", "denied", "fotos", "incredible", "britney", "freeware", "donation", "outer", "crop", "deaths", "rivers", "commonwealth", "pharmaceutical", "manhattan", "tales", "katrina", "workforce", "islam", "nodes", "tu", "fy", "thumbs", "seeds", "cited", "lite", "ghz", "hub", "targeted", "organizational", "skype", "realized", "twelve", "founder", "decade", "gamecube", "rr", "dispute", "portuguese", "tired", "titten", "adverse", "everywhere", "excerpt", "eng", "steam", "discharge", "ef", "drinks", "ace", "voices", "acute", "halloween", "climbing", "stood", "sing", "tons", "perfume", "carol", "honest", "albany", "hazardous", "restore", "stack", "methodology", "somebody", "sue", "ep", "housewares", "reputation", "resistant", "democrats", "recycling", "hang", "gbp", "curve", "creator", "amber", "qualifications", "museums", "coding", "slideshow", "tracker", "variation", "passage", "transferred", "trunk", "hiking", "lb", "pierre", "jelsoft", "headset", "photograph", "oakland", "colombia", "waves", "camel", "distributor", "lamps", "underlying", "hood", "wrestling", "suicide", "archived", "photoshop", "jp", "chi", "bt", "arabia", "gathering", "projection", "juice", "chase", "mathematical", "logical", "sauce", "fame", "extract", "specialized", "diagnostic", "panama", "indianapolis", "af", "payable", "corporations", "courtesy", "criticism", "automobile", "confidential", "rfc", "statutory", "accommodations", "athens", "northeast", "downloaded", "judges", "sl", "seo", "retired", "isp", "remarks", "detected", "decades", 
"paintings", "walked", "arising", "nissan", "bracelet", "ins", "eggs", "juvenile", "injection", "yorkshire", "populations", "protective", "afraid", "acoustic", "railway", "cassette", "initially", "indicator", "pointed", "hb", "jpg", "causing", "mistake", "norton", "locked", "eliminate", "tc", "fusion", "mineral", "sunglasses", "ruby", "steering", "beads", "fortune", "preference", "canvas", "threshold", "parish", "claimed", "screens", "cemetery", "planner", "croatia", "flows", "stadium", "venezuela", "exploration", "mins", "fewer", "sequences", "coupon", "nurses", "ssl", "stem", "proxy", "astronomy", "lanka", "opt", "edwards", "drew", "contests", "flu", "translate", "announces", "mlb", "costume", "tagged", "berkeley", "voted", "killer", "bikes", "gates", "adjusted", "rap", "tune", "bishop", "pulled", "corn", "gp", "shaped", "compression", "seasonal", "establishing", "farmer", "counters", "puts", "constitutional", "grew", "perfectly", "tin", "slave", "instantly", "cultures", "norfolk", "coaching", "examined", "trek", "encoding", "litigation", "submissions", "oem", "heroes", "painted", "lycos", "ir", "zdnet", "broadcasting", "horizontal", "artwork", "cosmetic", "resulted", "portrait", "terrorist", "informational", "ethical", "carriers", "ecommerce", "mobility", "floral", "builders", "ties", "struggle", "schemes", "suffering", "neutral", "fisher", "rat", "spears", "prospective", "bedding", "ultimately", "joining", "heading", "equally", "artificial", "bearing", "spectacular", "coordination", "connector", "brad", "combo", "seniors", "worlds", "guilty", "affiliated", "activation", "naturally", "haven", "tablet", "jury", "dos", "tail", "subscribers", "charm", "lawn", "violent", "mitsubishi", "underwear", "basin", "soup", "potentially", "ranch", "constraints", "crossing", "inclusive", "dimensional", "cottage", "drunk", "considerable", "crimes", "resolved", "mozilla", "byte", "toner", "nose", "latex", "branches", "anymore", "oclc", "delhi", "holdings", "alien", "locator", 
"selecting", "processors", "pantyhose", "plc", "broke", "nepal", "zimbabwe", "difficulties", "juan", "complexity", "msg", "constantly", "browsing", "resolve", "barcelona", "presidential", "documentary", "cod", "territories", "melissa", "moscow", "thesis", "thru", "jews", "nylon", "palestinian", "discs", "rocky", "bargains", "frequent", "trim", "nigeria", "ceiling", "pixels", "ensuring", "hispanic", "cv", "cb", "legislature", "hospitality", "gen", "anybody", "procurement", "diamonds", "espn", "fleet", "untitled", "bunch", "totals", "marriott", "singing", "theoretical", "afford", "exercises", "starring", "referral", "nhl", "surveillance", "optimal", "quit", "distinct", "protocols", "lung", "highlight", "substitute", "inclusion", "hopefully", "brilliant", "turner", "sucking", "cents", "reuters", "ti", "fc", "gel", "todd", "spoken", "omega", "evaluated", "stayed", "civic", "assignments", "fw", "manuals", "doug", "sees", "termination", "watched", "saver", "thereof", "grill", "households", "gs", "redeem", "rogers", "grain", "aaa", "authentic", "regime", "wanna", "wishes", "bull", "montgomery", "architectural", "louisville", "depend", "differ", "macintosh", "movements", "ranging", "monica", "repairs", "breath", "amenities", "virtually", "cole", "mart", "candle", "hanging", "colored", "authorization", "tale", "verified", "lynn", "formerly", "projector", "bp", "situated", "comparative", "std", "seeks", "herbal", "loving", "strictly", "routing", "docs", "stanley", "psychological", "surprised", "retailer", "vitamins", "elegant", "gains", "renewal", "vid", "genealogy", "opposed", "deemed", "scoring", "expenditure", "brooklyn", "liverpool", "sisters", "critics", "connectivity", "spots", "oo", "algorithms", "hacker", "madrid", "similarly", "margin", "coin", "solely", "fake", "salon", "collaborative", "norman", "fda", "excluding", "turbo", "headed", "voters", "cure", "madonna", "commander", "arch", "ni", "murphy", "thinks", "thats", "suggestion", "hdtv", "soldier", "phillips", 
"asin", "aimed", "justin", "bomb", "harm", "interval", "mirrors", "spotlight", "tricks", "reset", "brush", "investigate", "thy", "expansys", "panels", "repeated", "assault", "connecting", "spare", "logistics", "deer", "kodak", "tongue", "bowling", "tri", "danish", "pal", "monkey", "proportion", "filename", "skirt", "florence", "invest", "honey", "um", "analyzes", "drawings", "significance", "scenario", "ye", "fs", "lovers", "atomic", "approx", "symposium", "arabic", "gauge", "essentials", "junction", "protecting", "nn", "faced", "mat", "rachel", "solving", "transmitted", "weekends", "screenshots", "produces", "oven", "ted", "intensive", "chains", "kingston", "sixth", "engage", "deviant", "noon", "switching", "quoted", "adapters", "correspondence", "farms", "imports", "supervision", "cheat", "bronze", "expenditures", "sandy", "separation", "testimony", "suspect", "celebrities", "macro", "sender", "mandatory", "boundaries", "crucial", "syndication", "gym", "celebration", "kde", "adjacent", "filtering", "tuition", "spouse", "exotic", "viewer", "signup", "threats", "luxembourg", "puzzles", "reaching", "vb", "damaged", "cams", "receptor", "laugh", "joel", "surgical", "destroy", "citation", "pitch", "autos", "yo", "premises", "perry", "proved", "offensive", "imperial", "dozen", "benjamin", "deployment", "teeth", "cloth", "studying", "colleagues", "stamp", "lotus", "salmon", "olympus", "separated", "proc", "cargo", "tan", "directive", "fx", "salem", "mate", "dl", "starter", "upgrades", "likes", "butter", "pepper", "weapon", "luggage", "burden", "chef", "tapes", "zones", "races", "isle", "stylish", "slim", "maple", "luke", "grocery", "offshore", "governing", "retailers", "depot", "kenneth", "comp", "alt", "pie", "blend", "harrison", "ls", "julie", "occasionally", "cbs", "attending", "emission", "pete", "spec", "finest", "realty", "janet", "bow", "penn", "recruiting", "apparent", "instructional", "phpbb", "autumn", "traveling", "probe", "midi", "permissions", 
"biotechnology", "toilet", "ranked", "jackets", "routes", "packed", "excited", "outreach", "helen", "mounting", "recover", "tied", "lopez", "balanced", "prescribed", "catherine", "timely", "talked", "upskirts", "debug", "delayed", "chuck", "reproduced", "hon", "dale", "explicit", "calculation", "villas", "ebook", "consolidated", "exclude", "peeing", "occasions", "brooks", "equations", "newton", "oils", "sept", "exceptional", "anxiety", "bingo", "whilst", "spatial", "respondents", "unto", "lt", "ceramic", "prompt", "precious", "minds", "annually", "considerations", "scanners", "atm", "xanax", "eq", "pays", "fingers", "sunny", "ebooks", "delivers", "je", "queensland", "necklace", "musicians", "leeds", "composite", "unavailable", "cedar", "arranged", "lang", "theaters", "advocacy", "raleigh", "stud", "fold", "essentially", "designing", "threaded", "uv", "qualify", "blair", "hopes", "assessments", "cms", "mason", "diagram", "burns", "pumps", "footwear", "sg", "vic", "beijing", "peoples", "victor", "mario", "pos", "attach", "licenses", "utils", "removing", "advised", "brunswick", "spider", "phys", "ranges", "pairs", "sensitivity", "trails", "preservation", "hudson", "isolated", "calgary", "interim", "assisted", "divine", "streaming", "approve", "chose", "compound", "intensity", "technological", "syndicate", "abortion", "dialog", "venues", "blast", "wellness", "calcium", "newport", "antivirus", "addressing", "pole", "discounted", "indians", "shield", "harvest", "membrane", "prague", "previews", "bangladesh", "constitute", "locally", "concluded", "pickup", "desperate", "mothers", "nascar", "iceland", "demonstration", "governmental", "manufactured", "candles", "graduation", "mega", "bend", "sailing", "variations", "moms", "sacred", "addiction", "morocco", "chrome", "tommy", "springfield", "refused", "brake", "exterior", "greeting", "ecology", "oliver", "congo", "glen", "botswana", "nav", "delays", "synthesis", "olive", "undefined", "unemployment", "cyber", "verizon", 
"scored", "enhancement", "newcastle", "clone", "dicks", "velocity", "lambda", "relay", "composed", "tears", "performances", "oasis", "baseline", "cab", "angry", "fa", "societies", "silicon", "brazilian", "identical", "petroleum", "compete", "ist", "norwegian", "lover", "belong", "honolulu", "beatles", "lips", "retention", "exchanges", "pond", "rolls", "thomson", "barnes", "soundtrack", "wondering", "malta", "daddy", "lc", "ferry", "rabbit", "profession", "seating", "dam", "cnn", "separately", "physiology", "lil", "collecting", "das", "exports", "omaha", "tire", "participant", "scholarships", "recreational", "dominican", "chad", "electron", "loads", "friendship", "heather", "passport", "motel", "unions", "treasury", "warrant", "sys", "solaris", "frozen", "occupied", "josh", "royalty", "scales", "rally", "observer", "sunshine", "strain", "drag", "ceremony", "somehow", "arrested", "expanding", "provincial", "investigations", "icq", "ripe", "yamaha", "rely", "medications", "hebrew", "gained", "rochester", "dying", "laundry", "stuck", "solomon", "placing", "stops", "homework", "adjust", "assessed", "advertiser", "enabling", "encryption", "filling", "downloadable", "sophisticated", "imposed", "silence", "scsi", "focuses", "soviet", "possession", "cu", "laboratories", "treaty", "vocal", "trainer", "organ", "stronger", "volumes", "advances", "vegetables", "lemon", "toxic", "dns", "thumbnails", "darkness", "pty", "ws", "nuts", "nail", "bizrate", "vienna", "implied", "span", "stanford", "sox", "stockings", "joke", "respondent", "packing", "statute", "rejected", "satisfy", "destroyed", "shelter", "chapel", "gamespot", "manufacture", "layers", "wordpress", "guided", "vulnerability", "accountability", "celebrate", "accredited", "appliance", "compressed", "bahamas", "powell", "mixture", "bench", "univ", "tub", "rider", "scheduling", "radius", "perspectives", "mortality", "logging", "hampton", "christians", "borders", "therapeutic", "pads", "butts", "inns", "bobby", "impressive", 
"sheep", "accordingly", "architect", "railroad", "lectures", "challenging", "wines", "nursery", "harder", "cups", "ash", "microwave", "cheapest", "accidents", "travesti", "relocation", "stuart", "contributors", "salvador", "ali", "salad", "np", "monroe", "tender", "violations", "foam", "temperatures", "paste", "clouds", "competitions", "discretion", "tft", "tanzania", "preserve", "jvc", "poem", "unsigned", "staying", "cosmetics", "easter", "theories", "repository", "praise", "jeremy", "venice", "jo", "concentrations", "vibrators", "estonia", "christianity", "veteran", "streams", "landing", "signing", "executed", "katie", "negotiations", "realistic", "dt", "cgi", "showcase", "integral", "asks", "relax", "namibia", "generating", "christina", "congressional", "synopsis", "hardly", "prairie", "reunion", "composer", "bean", "sword", "absent", "photographic", "sells", "ecuador", "hoping", "accessed", "spirits", "modifications", "coral", "pixel", "float", "colin", "bias", "imported", "paths", "bubble", "por", "acquire", "contrary", "millennium", "tribune", "vessel", "acids", "focusing", "viruses", "cheaper", "admitted", "dairy", "admit", "mem", "fancy", "equality", "samoa", "gc", "achieving", "tap", "stickers", "fisheries", "exceptions", "reactions", "leasing", "lauren", "beliefs", "ci", "macromedia", "companion", "squad", "analyze", "ashley", "scroll", "relate", "divisions", "swim", "wages", "additionally", "suffer", "forests", "fellowship", "nano", "invalid", "concerts", "martial", "males", "victorian", "retain", "execute", "tunnel", "genres", "cambodia", "patents", "copyrights", "yn", "chaos", "lithuania", "mastercard", "wheat", "chronicles", "obtaining", "beaver", "updating", "distribute", "readings", "decorative", "kijiji", "confused", "compiler", "enlargement", "eagles", "bases", "vii", "accused", "bee", "campaigns", "unity", "loud", "conjunction", "bride", "rats", "defines", "airports", "instances", "indigenous", "begun", "cfr", "brunette", "packets", "anchor", 
"socks", "validation", "parade", "corruption", "stat", "trigger", "incentives", "cholesterol", "gathered", "essex", "slovenia", "notified", "differential", "beaches", "folders", "dramatic", "surfaces", "terrible", "routers", "cruz", "pendant", "dresses", "baptist", "scientist", "starsmerchant", "hiring", "clocks", "arthritis", "bios", "females", "wallace", "nevertheless", "reflects", "taxation", "fever", "pmc", "cuisine", "surely", "practitioners", "transcript", "myspace", "theorem", "inflation", "thee", "nb", "ruth", "pray", "stylus", "compounds", "pope", "drums", "contracting", "arnold", "structured", "reasonably", "jeep", "chicks", "bare", "hung", "cattle", "mba", "radical", "graduates", "rover", "recommends", "controlling", "treasure", "reload", "distributors", "flame", "levitra", "tanks", "assuming", "monetary", "elderly", "pit", "arlington", "mono", "particles", "floating", "extraordinary", "tile", "indicating", "bolivia", "spell", "hottest", "stevens", "coordinate", "kuwait", "exclusively", "emily", "alleged", "limitation", "widescreen", "compile", "squirting", "webster", "struck", "rx", "illustration", "plymouth", "warnings", "construct", "apps", "inquiries", "bridal", "annex", "mag", "gsm", "inspiration", "tribal", "curious", "affecting", "freight", "rebate", "meetup", "eclipse", "sudan", "ddr", "downloading", "rec", "shuttle", "aggregate", "stunning", "cycles", "affects", "forecasts", "detect", "actively", "ciao", "ampland", "knee", "prep", "pb", "complicated", "chem", "fastest", "butler", "shopzilla", "injured", "decorating", "payroll", "cookbook", "expressions", "ton", "courier", "uploaded", "shakespeare", "hints", "collapse", "americas", "connectors", "twinks", "unlikely", "oe", "gif", "pros", "conflicts", "techno", "beverage", "tribute", "wired", "elvis", "immune", "latvia", "travelers", "forestry", "barriers", "cant", "jd", "rarely", "gpl", "infected", "offerings", "martha", "genesis", "barrier", "argue", "incorrect", "trains", "metals", "bicycle", 
"furnishings", "letting", "arise", "guatemala", "celtic", "thereby", "irc", "jamie", "particle", "perception", "minerals", "advise", "humidity", "bottles", "boxing", "wy", "dm", "bangkok", "renaissance", "pathology", "sara", "bra", "ordinance", "hughes", "photographers", "infections", "jeffrey", "chess", "operates", "brisbane", "configured", "survive", "oscar", "festivals", "menus", "joan", "possibilities", "duck", "reveal", "canal", "amino", "phi", "contributing", "herbs", "clinics", "mls", "cow", "manitoba", "analytical", "missions", "watson", "lying", "costumes", "strict", "dive", "saddam", "circulation", "drill", "offense", "bryan", "cet", "protest", "assumption", "jerusalem", "hobby", "tries", "transexuales", "invention", "nickname", "fiji", "technician", "inline", "executives", "enquiries", "washing", "audi", "staffing", "cognitive", "exploring", "trick", "enquiry", "closure", "raid", "ppc", "timber", "volt", "intense", "div", "playlist", "registrar", "showers", "supporters", "ruling", "steady", "dirt", "statutes", "withdrawal", "myers", "drops", "predicted", "wider", "saskatchewan", "jc", "cancellation", "plugins", "enrolled", "sensors", "screw", "ministers", "publicly", "hourly", "blame", "geneva", "freebsd", "veterinary", "acer", "prostores", "reseller", "dist", "handed", "suffered", "intake", "informal", "relevance", "incentive", "butterfly", "tucson", "mechanics", "heavily", "swingers", "fifty", "headers", "mistakes", "numerical", "ons", "geek", "uncle", "defining", "xnxx", "counting", "reflection", "sink", "accompanied", "assure", "invitation", "devoted", "princeton", "jacob", "sodium", "randy", "spirituality", "hormone", "meanwhile", "proprietary", "timothy", "childrens", "brick", "grip", "naval", "thumbzilla", "medieval", "porcelain", "avi", "bridges", "pichunter", "captured", "watt", "thehun", "decent", "casting", "dayton", "translated", "shortly", "cameron", "columnists", "pins", "carlos", "reno", "donna", "andreas", "warrior", "diploma", "cabin", 
"innocent", "scanning", "ide", "consensus", "polo", "valium", "copying", "rpg", "delivering", "cordless", "patricia", "horn", "eddie", "uganda", "fired", "journalism", "pd", "prot", "trivia", "adidas", "perth", "frog", "grammar", "intention", "syria", "disagree", "klein", "harvey", "tires", "logs", "undertaken", "tgp", "hazard", "retro", "leo", "livesex", "statewide", "semiconductor", "gregory", "episodes", "boolean", "circular", "anger", "diy", "mainland", "illustrations", "suits", "chances", "interact", "snap", "happiness", "arg", "substantially", "bizarre", "glenn", "ur", "auckland", "olympics", "fruits", "identifier", "geo", "worldsex", "ribbon", "calculations", "doe", "jpeg", "conducting", "startup", "suzuki", "trinidad", "ati", "kissing", "wal", "handy", "swap", "exempt", "crops", "reduces", "accomplished", "calculators", "geometry", "impression", "abs", "slovakia", "flip", "guild", "correlation", "gorgeous", "capitol", "sim", "dishes", "rna", "barbados", "chrysler", "nervous", "refuse", "extends", "fragrance", "mcdonald", "replica", "plumbing", "brussels", "tribe", "neighbors", "trades", "superb", "buzz", "transparent", "nuke", "rid", "trinity", "charleston", "handled", "legends", "boom", "calm", "champions", "floors", "selections", "projectors", "inappropriate", "exhaust", "comparing", "shanghai", "speaks", "burton", "vocational", "davidson", "copied", "scotia", "farming", "gibson", "pharmacies", "fork", "troy", "ln", "roller", "introducing", "batch", "organize", "appreciated", "alter", "nicole", "latino", "ghana", "edges", "uc", "mixing", "handles", "skilled", "fitted", "albuquerque", "harmony", "distinguished", "asthma", "projected", "assumptions", "shareholders", "twins", "developmental", "rip", "zope", "regulated", "triangle", "amend", "anticipated", "oriental", "reward", "windsor", "zambia", "completing", "gmbh", "buf", "ld", "hydrogen", "webshots", "sprint", "comparable", "chick", "advocate", "sims", "confusion", "copyrighted", "tray", "inputs", 
"warranties", "genome", "escorts", "documented", "thong", "medal", "paperbacks", "coaches", "vessels", "walks", "sol", "keyboards", "sage", "knives", "eco", "vulnerable", "arrange", "artistic", "bat", "honors", "booth", "indie", "reflected", "unified", "bones", "breed", "detector", "ignored", "polar", "fallen", "precise", "sussex", "respiratory", "notifications", "msgid", "transexual", "mainstream", "invoice", "evaluating", "lip", "subcommittee", "sap", "gather", "suse", "maternity", "backed", "alfred", "colonial", "mf", "carey", "motels", "forming", "embassy", "cave", "journalists", "danny", "rebecca", "slight", "proceeds", "indirect", "amongst", "wool", "foundations", "msgstr", "arrest", "volleyball", "mw", "adipex", "horizon", "nu", "deeply", "toolbox", "ict", "marina", "liabilities", "prizes", "bosnia", "browsers", "decreased", "patio", "dp", "tolerance", "surfing", "creativity", "lloyd", "describing", "optics", "pursue", "lightning", "overcome", "eyed", "ou", "quotations", "grab", "inspector", "attract", "brighton", "beans", "bookmarks", "ellis", "disable", "snake", "succeed", "leonard", "lending", "oops", "reminder", "xi", "searched", "behavioral", "riverside", "bathrooms", "plains", "sku", "ht", "raymond", "insights", "abilities", "initiated", "sullivan", "za", "midwest", "karaoke", "trap", "lonely", "fool", "ve", "nonprofit", "lancaster", "suspended", "hereby", "observe", "julia", "containers", "attitudes", "karl", "berry", "collar", "simultaneously", "racial", "integrate", "bermuda", "amanda", "sociology", "mobiles", "screenshot", "exhibitions", "kelkoo", "confident", "retrieved", "exhibits", "officially", "consortium", "dies", "terrace", "bacteria", "pts", "replied", "seafood", "novels", "rh", "rrp", "recipients", "ought", "delicious", "traditions", "fg", "jail", "safely", "finite", "kidney", "periodically", "fixes", "sends", "durable", "mazda", "allied", "throws", "moisture", "hungarian", "roster", "referring", "symantec", "spencer", "wichita", "nasdaq", 
"uruguay", "ooo", "hz", "transform", "timer", "tablets", "tuning", "gotten", "educators", "tyler", "futures", "vegetable", "verse", "highs", "humanities", "independently", "wanting", "custody", "scratch", "launches", "ipaq", "alignment", "masturbating", "henderson", "bk", "britannica", "comm", "ellen", "competitors", "nhs", "rocket", "aye", "bullet", "towers", "racks", "lace", "nasty", "visibility", "latitude", "consciousness", "ste", "tumor", "ugly", "deposits", "beverly", "mistress", "encounter", "trustees", "watts", "duncan", "reprints", "hart", "bernard", "resolutions", "ment", "accessing", "forty", "tubes", "attempted", "col", "midlands", "priest", "floyd", "ronald", "analysts", "queue", "dx", "sk", "trance", "locale", "nicholas", "biol", "yu", "bundle", "hammer", "invasion", "witnesses", "runner", "rows", "administered", "notion", "sq", "skins", "mailed", "oc", "fujitsu", "spelling", "arctic", "exams", "rewards", "beneath", "strengthen", "defend", "aj", "frederick", "medicaid", "treo", "infrared", "seventh", "gods", "une", "welsh", "belly", "aggressive", "tex", "advertisements", "quarters", "stolen", "cia", "sublimedirectory", "soonest", "haiti", "disturbed", "determines", "sculpture", "poly", "ears", "dod", "wp", "fist", "naturals", "neo", "motivation", "lenders", "pharmacology", "fitting", "fixtures", "bloggers", "mere", "agrees", "passengers", "quantities", "petersburg", "consistently", "powerpoint", "cons", "surplus", "elder", "sonic", "obituaries", "cheers", "dig", "taxi", "punishment", "appreciation", "subsequently", "om", "belarus", "nat", "zoning", "gravity", "providence", "thumb", "restriction", "incorporate", "backgrounds", "treasurer", "guitars", "essence", "flooring", "lightweight", "ethiopia", "tp", "mighty", "athletes", "humanity", "transcription", "jm", "holmes", "complications", "scholars", "dpi", "scripting", "gis", "remembered", "galaxy", "chester", "snapshot", "caring", "loc", "worn", "synthetic", "shaw", "vp", "segments", "testament", 
"expo", "dominant", "twist", "specifics", "itunes", "stomach", "partially", "buried", "cn", "newbie", "minimize", "darwin", "ranks", "wilderness", "debut", "generations", "tournaments", "bradley", "deny", "anatomy", "bali", "judy", "sponsorship", "headphones", "fraction", "trio", "proceeding", "cube", "defects", "volkswagen", "uncertainty", "breakdown", "milton", "marker", "reconstruction", "subsidiary", "strengths", "clarity", "rugs", "sandra", "adelaide", "encouraging", "furnished", "monaco", "settled", "folding", "emirates", "terrorists", "airfare", "comparisons", "beneficial", "distributions", "vaccine", "belize", "fate", "viewpicture", "promised", "volvo", "penny", "robust", "bookings", "threatened", "minolta", "republicans", "discusses", "gui", "porter", "gras", "jungle", "ver", "rn", "responded", "rim", "abstracts", "zen", "ivory", "alpine", "dis", "prediction", "pharmaceuticals", "andale", "fabulous", "remix", "alias", "thesaurus", "individually", "battlefield", "literally", "newer", "kay", "ecological", "spice", "oval", "implies", "cg", "soma", "ser", "cooler", "appraisal", "consisting", "maritime", "periodic", "submitting", "overhead", "ascii", "prospect", "shipment", "breeding", "citations", "geographical", "donor", "mozambique", "tension", "href", "benz", "trash", "shapes", "wifi", "tier", "fwd", "earl", "manor", "envelope", "diane", "homeland", "disclaimers", "championships", "excluded", "andrea", "breeds", "rapids", "disco", "sheffield", "bailey", "aus", "endif", "finishing", "emotions", "wellington", "incoming", "prospects", "lexmark", "cleaners", "bulgarian", "hwy", "eternal", "cashiers", "guam", "cite", "aboriginal", "remarkable", "rotation", "nam", "preventing", "productive", "boulevard", "eugene", "ix", "gdp", "pig", "metric", "compliant", "minus", "penalties", "bennett", "imagination", "hotmail", "refurbished", "joshua", "armenia", "varied", "grande", "closest", "activated", "actress", "mess", "conferencing", "assign", "armstrong", 
"politicians", "trackbacks", "lit", "accommodate", "tigers", "aurora", "una", "slides", "milan", "premiere", "lender", "villages", "shade", "chorus", "christine", "rhythm", "digit", "argued", "dietary", "symphony", "clarke", "sudden", "accepting", "precipitation", "marilyn", "lions", "findlaw", "ada", "pools", "tb", "lyric", "claire", "isolation", "speeds", "sustained", "matched", "approximate", "rope", "carroll", "rational", "programmer", "fighters", "chambers", "dump", "greetings", "inherited", "warming", "incomplete", "vocals", "chronicle", "fountain", "chubby", "grave", "legitimate", "biographies", "burner", "yrs", "foo", "investigator", "gba", "plaintiff", "finnish", "gentle", "bm", "prisoners", "deeper", "muslims", "hose", "mediterranean", "nightlife", "footage", "howto", "worthy", "reveals", "architects", "saints", "entrepreneur", "carries", "sig", "freelance", "duo", "excessive", "devon", "screensaver", "helena", "saves", "regarded", "valuation", "unexpected", "cigarette", "fog", "characteristic", "marion", "lobby", "egyptian", "tunisia", "metallica", "outlined", "consequently", "headline", "treating", "punch", "appointments", "str", "gotta", "cowboy", "narrative", "bahrain", "enormous", "karma", "consist", "betty", "queens", "academics", "pubs", "quantitative", "shemales", "lucas", "screensavers", "subdivision", "tribes", "vip", "defeat", "clicks", "distinction", "honduras", "naughty", "hazards", "insured", "harper", "livestock", "mardi", "exemption", "tenant", "sustainability", "cabinets", "tattoo", "shake", "algebra", "shadows", "holly", "formatting", "silly", "nutritional", "yea", "mercy", "hartford", "freely", "marcus", "sunrise", "wrapping", "mild", "fur", "nicaragua", "weblogs", "timeline", "tar", "belongs", "rj", "readily", "affiliation", "soc", "fence", "nudist", "infinite", "diana", "ensures", "relatives", "lindsay", "clan", "legally", "shame", "satisfactory", "revolutionary", "bracelets", "sync", "civilian", "telephony", "mesa", "fatal", 
"remedy", "realtors", "breathing", "briefly", "thickness", "adjustments", "graphical", "genius", "discussing", "aerospace", "fighter", "meaningful", "flesh", "retreat", "adapted", "barely", "wherever", "estates", "rug", "democrat", "borough", "maintains", "failing", "shortcuts", "ka", "retained", "voyeurweb", "pamela", "andrews", "marble", "extending", "jesse", "specifies", "hull", "logitech", "surrey", "briefing", "belkin", "dem", "accreditation", "wav", "blackberry", "highland", "meditation", "modular", "microphone", "macedonia", "combining", "brandon", "instrumental", "giants", "organizing", "shed", "balloon", "moderators", "winston", "memo", "ham", "solved", "tide", "kazakhstan", "hawaiian", "standings", "partition", "invisible", "gratuit", "consoles", "funk", "fbi", "qatar", "magnet", "translations", "porsche", "cayman", "jaguar", "reel", "sheer", "commodity", "posing", "kilometers", "rp", "bind", "thanksgiving", "rand", "hopkins", "urgent", "guarantees", "infants", "gothic", "cylinder", "witch", "buck", "indication", "eh", "congratulations", "tba", "cohen", "sie", "usgs", "puppy", "kathy", "acre", "graphs", "surround", "cigarettes", "revenge", "expires", "enemies", "lows", "controllers", "aqua", "chen", "emma", "consultancy", "finances", "accepts", "enjoying", "conventions", "eva", "patrol", "smell", "pest", "hc", "italiano", "coordinates", "rca", "fp", "carnival", "roughly", "sticker", "promises", "responding", "reef", "physically", "divide", "stakeholders", "hydrocodone", "gst", "consecutive", "cornell", "satin", "bon", "deserve", "attempting", "mailto", "promo", "jj", "representations", "chan", "worried", "tunes", "garbage", "competing", "combines", "mas", "beth", "bradford", "len", "phrases", "kai", "peninsula", "chelsea", "boring", "reynolds", "dom", "jill", "accurately", "speeches", "reaches", "schema", "considers", "sofa", "catalogs", "ministries", "vacancies", "quizzes", "parliamentary", "obj", "prefix", "lucia", "savannah", "barrel", "typing", 
"nerve", "dans", "planets", "deficit", "boulder", "pointing", "renew", "coupled", "viii", "myanmar", "metadata", "harold", "circuits", "floppy", "texture", "handbags", "jar", "ev", "somerset", "incurred", "acknowledge", "thoroughly", "antigua", "nottingham", "thunder", "tent", "caution", "identifies", "questionnaire", "qualification", "locks", "modelling", "namely", "miniature", "dept", "hack", "dare", "euros", "interstate", "pirates", "aerial", "hawk", "consequence", "rebel", "systematic", "perceived", "origins", "hired", "makeup", "textile", "lamb", "madagascar", "nathan", "tobago", "presenting", "cos", "troubleshooting", "uzbekistan", "indexes", "pac", "rl", "erp", "centuries", "gl", "magnitude", "ui", "richardson", "hindu", "dh", "fragrances", "vocabulary", "licking", "earthquake", "vpn", "fundraising", "fcc", "markers", "weights", "albania", "geological", "assessing", "lasting", "wicked", "eds", "introduces", "kills", "roommate", "webcams", "pushed", "webmasters", "ro", "df", "computational", "acdbentity", "participated", "junk", "handhelds", "wax", "lucy", "answering", "hans", "impressed", "slope", "reggae", "failures", "poet", "conspiracy", "surname", "theology", "nails", "evident", "whats", "rides", "rehab", "epic", "saturn", "organizer", "nut", "allergy", "sake", "twisted", "combinations", "preceding", "merit", "enzyme", "cumulative", "zshops", "planes", "edmonton", "tackle", "disks", "condo", "pokemon", "amplifier", "ambien", "arbitrary", "prominent", "retrieve", "lexington", "vernon", "sans", "worldcat", "titanium", "irs", "fairy", "builds", "contacted", "shaft", "lean", "bye", "cdt", "recorders", "occasional", "leslie", "casio", "deutsche", "ana", "postings", "innovations", "kitty", "postcards", "dude", "drain", "monte", "fires", "algeria", "blessed", "luis", "reviewing", "cardiff", "cornwall", "favors", "potato", "panic", "explicitly", "sticks", "leone", "transsexual", "ez", "citizenship", "excuse", "reforms", "basement", "onion", "strand", "pf", 
"sandwich", "uw", "lawsuit", "alto", "informative", "girlfriend", "bloomberg", "cheque", "hierarchy", "influenced", "banners", "reject", "eau", "abandoned", "bd", "circles", "italic", "beats", "merry", "mil", "scuba", "gore", "complement", "cult", "dash", "passive", "mauritius", "valued", "cage", "checklist", "bangbus", "requesting", "courage", "verde", "lauderdale", "scenarios", "gazette", "hitachi", "divx", "extraction", "batman", "elevation", "hearings", "coleman", "hugh", "lap", "utilization", "beverages", "calibration", "jake", "eval", "efficiently", "anaheim", "ping", "textbook", "dried", "entertaining", "prerequisite", "luther", "frontier", "settle", "stopping", "refugees", "knights", "hypothesis", "palmer", "medicines", "flux", "derby", "sao", "peaceful", "altered", "pontiac", "regression", "doctrine", "scenic", "trainers", "muze", "enhancements", "renewable", "intersection", "passwords", "sewing", "consistency", "collectors", "conclude", "munich", "oman", "celebs", "gmc", "propose", "hh", "azerbaijan", "lighter", "rage", "adsl", "uh", "prix", "astrology", "advisors", "pavilion", "tactics", "trusts", "occurring", "supplemental", "travelling", "talented", "annie", "pillow", "induction", "derek", "precisely", "shorter", "harley", "spreading", "provinces", "relying", "finals", "paraguay", "steal", "parcel", "refined", "fd", "bo", "fifteen", "widespread", "incidence", "fears", "predict", "boutique", "acrylic", "rolled", "tuner", "avon", "incidents", "peterson", "rays", "asn", "shannon", "toddler", "enhancing", "flavor", "alike", "walt", "homeless", "horrible", "hungry", "metallic", "acne", "blocked", "interference", "warriors", "palestine", "listprice", "libs", "undo", "cadillac", "atmospheric", "malawi", "wm", "pk", "sagem", "knowledgestorm", "dana", "halo", "ppm", "curtis", "parental", "referenced", "strikes", "lesser", "publicity", "marathon", "ant", "proposition", "gays", "pressing", "gasoline", "apt", "dressed", "scout", "belfast", "exec", "dealt", 
"niagara", "inf", "eos", "warcraft", "charms", "catalyst", "trader", "bucks", "allowance", "vcr", "denial", "uri", "designation", "thrown", "prepaid", "raises", "gem", "duplicate", "electro", "criterion", "badge", "wrist", "civilization", "analyzed", "vietnamese", "heath", "tremendous", "ballot", "lexus", "varying", "remedies", "validity", "trustee", "maui", "handjobs", "weighted", "angola", "squirt", "performs", "plastics", "realm", "corrected", "jenny", "helmet", "salaries", "postcard", "elephant", "yemen", "encountered", "tsunami", "scholar", "nickel", "internationally", "surrounded", "psi", "buses", "expedia", "geology", "pct", "wb", "creatures", "coating", "commented", "wallet", "cleared", "smilies", "vids", "accomplish", "boating", "drainage", "shakira", "corners", "broader", "vegetarian", "rouge", "yeast", "yale", "newfoundland", "sn", "qld", "pas", "clearing", "investigated", "dk", "ambassador", "coated", "intend", "stephanie", "contacting", "vegetation", "doom", "findarticles", "louise", "kenny", "specially", "owen", "routines", "hitting", "yukon", "beings", "bite", "issn", "aquatic", "reliance", "habits", "striking", "myth", "infectious", "podcasts", "singh", "gig", "gilbert", "sas", "ferrari", "continuity", "brook", "fu", "outputs", "phenomenon", "ensemble", "insulin", "assured", "biblical", "weed", "conscious", "accent", "mysimon", "eleven", "wives", "ambient", "utilize", "mileage", "oecd", "prostate", "adaptor", "auburn", "unlock", "hyundai", "pledge", "vampire", "angela", "relates", "nitrogen", "xerox", "dice", "merger", "softball", "referrals", "quad", "dock", "differently", "firewire", "mods", "nextel", "framing", "musician", "blocking", "rwanda", "sorts", "integrating", "vsnet", "limiting", "dispatch", "revisions", "papua", "restored", "hint", "armor", "riders", "chargers", "remark", "dozens", "varies", "msie", "reasoning", "wn", "liz", "rendered", "picking", "charitable", "guards", "annotated", "ccd", "sv", "convinced", "openings", "buys", 
"burlington", "replacing", "researcher", "watershed", "councils", "occupations", "acknowledged", "kruger", "pockets", "granny", "pork", "zu", "equilibrium", "viral", "inquire", "pipes", "characterized", "laden", "aruba", "cottages", "realtor", "merge", "privilege", "edgar", "develops", "qualifying", "chassis", "dubai", "estimation", "barn", "pushing", "llp", "fleece", "pediatric", "boc", "fare", "dg", "asus", "pierce", "allan", "dressing", "techrepublic", "sperm", "vg", "bald", "filme", "craps", "fuji", "frost", "leon", "institutes", "mold", "dame", "fo", "sally", "yacht", "tracy", "prefers", "drilling", "brochures", "herb", "tmp", "alot", "ate", "breach", "whale", "traveller", "appropriations", "suspected", "tomatoes", "benchmark", "beginners", "instructors", "highlighted", "bedford", "stationery", "idle", "mustang", "unauthorized", "clusters", "antibody", "competent", "momentum", "fin", "wiring", "io", "pastor", "mud", "calvin", "uni", "shark", "contributor", "demonstrates", "phases", "grateful", "emerald", "gradually", "laughing", "grows", "cliff", "desirable", "tract", "ul", "ballet", "ol", "journalist", "abraham", "js", "bumper", "afterwards", "webpage", "religions", "garlic", "hostels", "shine", "senegal", "explosion", "pn", "banned", "wendy", "briefs", "signatures", "diffs", "cove", "mumbai", "ozone", "disciplines", "casa", "mu", "daughters", "conversations", "radios", "tariff", "nvidia", "opponent", "pasta", "simplified", "muscles", "serum", "wrapped", "swift", "motherboard", "runtime", "inbox", "focal", "bibliographic", "eden", "distant", "incl", "champagne", "ala", "decimal", "hq", "deviation", "superintendent", "propecia", "dip", "nbc", "samba", "hostel", "housewives", "employ", "mongolia", "penguin", "magical", "influences", "inspections", "irrigation", "miracle", "manually", "reprint", "reid", "wt", "hydraulic", "centered", "robertson", "flex", "yearly", "penetration", "wound", "belle", "rosa", "conviction", "hash", "omissions", "writings", "hamburg", 
"lazy", "mv", "mpg", "retrieval", "qualities", "cindy", "fathers", "carb", "charging", "cas", "marvel", "lined", "cio", "dow", "prototype", "importantly", "rb", "petite", "apparatus", "upc", "terrain", "dui", "pens", "explaining", "yen", "strips", "gossip", "rangers", "nomination", "empirical", "mh", "rotary", "worm", "dependence", "discrete", "beginner", "boxed", "lid", "sexuality", "polyester", "cubic", "deaf", "commitments", "suggesting", "sapphire", "kinase", "skirts", "mats", "remainder", "crawford", "labeled", "privileges", "televisions", "specializing", "marking", "commodities", "pvc", "serbia", "sheriff", "griffin", "declined", "guyana", "spies", "blah", "mime", "neighbor", "motorcycles", "elect", "highways", "thinkpad", "concentrate", "intimate", "reproductive", "preston", "deadly", "feof", "bunny", "chevy", "molecules", "rounds", "longest", "refrigerator", "tions", "intervals", "sentences", "dentists", "usda", "exclusion", "workstation", "holocaust", "keen", "flyer", "peas", "dosage", "receivers", "urls", "disposition", "variance", "navigator", "investigators", "cameroon", "baking", "marijuana", "adaptive", "computed", "needle", "baths", "enb", "gg", "cathedral", "brakes", "og", "nirvana", "ko", "fairfield", "owns", "til", "invision", "sticky", "destiny", "generous", "madness", "emacs", "climb", "blowing", "fascinating", "landscapes", "heated", "lafayette", "jackie", "wto", "computation", "hay", "cardiovascular", "ww", "sparc", "cardiac", "salvation", "dover", "adrian", "predictions", "accompanying", "vatican", "brutal", "learners", "gd", "selective", "arbitration", "configuring", "token", "editorials", "zinc", "sacrifice", "seekers", "guru", "isa", "removable", "convergence", "yields", "gibraltar", "levy", "suited", "numeric", "anthropology", "skating", "kinda", "aberdeen", "emperor", "grad", "malpractice", "dylan", "bras", "belts", "blacks", "educated", "rebates", "reporters", "burke", "proudly", "pix", "necessity", "rendering", "mic", "inserted", 
"pulling", "basename", "kyle", "obesity", "curves", "suburban", "touring", "clara", "vertex", "bw", "hepatitis", "nationally", "tomato", "andorra", "waterproof", "expired", "mj", "travels", "flush", "waiver", "pale", "specialties", "hayes", "humanitarian", "invitations", "functioning", "delight", "survivor", "garcia", "cingular", "economies", "alexandria", "bacterial", "moses", "counted", "undertake", "declare", "continuously", "johns", "valves", "gaps", "impaired", "achievements", "donors", "tear", "jewel", "teddy", "lf", "convertible", "ata", "teaches", "ventures", "nil", "bufing", "stranger", "tragedy", "julian", "nest", "pam", "dryer", "painful", "velvet", "tribunal", "ruled", "nato", "pensions", "prayers", "funky", "secretariat", "nowhere", "cop", "paragraphs", "gale", "joins", "adolescent", "nominations", "wesley", "dim", "lately", "cancelled", "scary", "mattress", "mpegs", "brunei", "likewise", "banana", "introductory", "slovak", "cakes", "stan", "reservoir", "occurrence", "idol", "mixer", "remind", "wc", "worcester", "sbjct", "demographic", "charming", "mai", "tooth", "disciplinary", "annoying", "respected", "stays", "disclose", "affair", "drove", "washer", "upset", "restrict", "springer", "beside", "mines", "portraits", "rebound", "logan", "mentor", "interpreted", "evaluations", "fought", "baghdad", "elimination", "metres", "hypothetical", "immigrants", "complimentary", "helicopter", "pencil", "freeze", "hk", "performer", "abu", "titled", "commissions", "sphere", "powerseller", "moss", "ratios", "concord", "graduated", "endorsed", "ty", "surprising", "walnut", "lance", "ladder", "italia", "unnecessary", "dramatically", "liberia", "sherman", "cork", "maximize", "cj", "hansen", "senators", "workout", "mali", "yugoslavia", "bleeding", "characterization", "colon", "likelihood", "lanes", "purse", "fundamentals", "contamination", "mtv", "endangered", "compromise", "masturbation", "optimize", "stating", "dome", "caroline", "leu", "expiration", "namespace", 
"align", "peripheral", "bless", "engaging", "negotiation", "crest", "opponents", "triumph", "nominated", "confidentiality", "electoral", "changelog", "welding", "deferred", "alternatively", "heel", "alloy", "condos", "plots", "polished", "yang", "gently", "greensboro", "tulsa", "locking", "casey", "controversial", "draws", "fridge", "blanket", "bloom", "qc", "simpsons", "lou", "elliott", "recovered", "fraser", "justify", "upgrading", "blades", "pgp", "loops", "surge", "frontpage", "trauma", "aw", "tahoe", "advert", "possess", "demanding", "defensive", "sip", "flashers", "subaru", "forbidden", "tf", "vanilla", "programmers", "pj", "monitored", "installations", "deutschland", "picnic", "souls", "arrivals", "spank", "cw", "practitioner", "motivated", "wr", "dumb", "smithsonian", "hollow", "vault", "securely", "examining", "fioricet", "groove", "revelation", "rg", "pursuit", "delegation", "wires", "bl", "dictionaries", "mails", "backing", "greenhouse", "sleeps", "vc", "blake", "transparency", "dee", "travis", "wx", "endless", "figured", "orbit", "currencies", "niger", "bacon", "survivors", "positioning", "heater", "colony", "cannon", "circus", "promoted", "forbes", "mae", "moldova", "mel", "descending", "paxil", "spine", "trout", "enclosed", "feat", "temporarily", "ntsc", "cooked", "thriller", "transmit", "apnic", "fatty", "gerald", "pressed", "frequencies", "scanned", "reflections", "hunger", "mariah", "sic", "municipality", "usps", "joyce", "detective", "surgeon", "cement", "experiencing", "fireplace", "endorsement", "bg", "planners", "disputes", "textiles", "missile", "intranet", "closes", "seq", "psychiatry", "persistent", "deborah", "conf", "marco", "assists", "summaries", "glow", "gabriel", "auditor", "wma", "aquarium", "violin", "prophet", "cir", "bracket", "looksmart", "isaac", "oxide", "oaks", "magnificent", "erik", "colleague", "naples", "promptly", "modems", "adaptation", "hu", "harmful", "paintball", "prozac", "sexually", "enclosure", "acm", "dividend", 
"newark", "kw", "paso", "glucose", "phantom", "norm", "playback", "supervisors", "westminster", "turtle", "ips", "distances", "absorption", "treasures", "dsc", "warned", "neural", "ware", "fossil", "mia", "hometown", "badly", "transcripts", "apollo", "wan", "disappointed", "persian", "continually", "communist", "collectible", "handmade", "greene", "entrepreneurs", "robots", "grenada", "creations", "jade", "scoop", "acquisitions", "foul", "keno", "gtk", "earning", "mailman", "sanyo", "nested", "biodiversity", "excitement", "somalia", "movers", "verbal", "blink", "presently", "seas", "carlo", "workflow", "mysterious", "novelty", "bryant", "tiles", "voyuer", "librarian", "subsidiaries", "switched", "stockholm", "tamil", "garmin", "ru", "pose", "fuzzy", "indonesian", "grams", "therapist", "richards", "mrna", "budgets", "toolkit", "promising", "relaxation", "goat", "render", "carmen", "ira", "sen", "thereafter", "hardwood", "erotica", "temporal", "sail", "forge", "commissioners", "dense", "dts", "brave", "forwarding", "qt", "awful", "nightmare", "airplane", "reductions", "southampton", "istanbul", "impose", "organisms", "sega", "telescope", "viewers", "asbestos", "portsmouth", "cdna", "meyer", "enters", "pod", "savage", "advancement", "wu", "harassment", "willow", "resumes", "bolt", "gage", "throwing", "existed", "generators", "lu", "wagon", "barbie", "dat", "soa", "knock", "urge", "smtp", "generates", "potatoes", "thorough", "replication", "inexpensive", "kurt", "receptors", "peers", "roland", "optimum", "neon", "interventions", "quilt", "huntington", "creature", "ours", "mounts", "syracuse", "internship", "lone", "refresh", "aluminium", "snowboard", "beastality", "webcast", "michel", "evanescence", "subtle", "coordinated", "notre", "shipments", "maldives", "stripes", "firmware", "antarctica", "cope", "shepherd", "lm", "canberra", "cradle", "chancellor", "mambo", "lime", "kirk", "flour", "controversy", "legendary", "bool", "sympathy", "choir", "avoiding", 
"beautifully", "blond", "expects", "cho", "jumping", "fabrics", "antibodies", "polymer", "hygiene", "wit", "poultry", "virtue", "burst", "examinations", "surgeons", "bouquet", "immunology", "promotes", "mandate", "wiley", "departmental", "bbs", "spas", "ind", "corpus", "johnston", "terminology", "gentleman", "fibre", "reproduce", "convicted", "shades", "jets", "indices", "roommates", "adware", "qui", "intl", "threatening", "spokesman", "zoloft", "activists", "frankfurt", "prisoner", "daisy", "halifax", "encourages", "ultram", "cursor", "assembled", "earliest", "donated", "stuffed", "restructuring", "insects", "terminals", "crude", "morrison", "maiden", "simulations", "cz", "sufficiently", "examines", "viking", "myrtle", "bored", "cleanup", "yarn", "knit", "conditional", "mug", "crossword", "bother", "budapest", "conceptual", "knitting", "attacked", "hl", "bhutan", "liechtenstein", "mating", "compute", "redhead", "arrives", "translator", "automobiles", "tractor", "allah", "continent", "ob", "unwrap", "fares", "longitude", "resist", "challenged", "telecharger", "hoped", "pike", "safer", "insertion", "instrumentation", "ids", "hugo", "wagner", "constraint", "groundwater", "touched", "strengthening", "cologne", "gzip", "wishing", "ranger", "smallest", "insulation", "newman", "marsh", "ricky", "ctrl", "scared", "theta", "infringement", "bent", "laos", "subjective", "monsters", "asylum", "lightbox", "robbie", "stake", "cocktail", "outlets", "swaziland", "varieties", "arbor", "mediawiki", "configurations", "poison"] \ No newline at end of file diff --git a/docs/res/metrics.png b/docs/res/metrics.png new file mode 100644 index 000000000..cc56624af Binary files /dev/null and b/docs/res/metrics.png differ diff --git a/docs/res/poster.png b/docs/res/poster.png new file mode 100644 index 000000000..30fd375df Binary files /dev/null and b/docs/res/poster.png differ diff --git a/docs/res/preview-app.png b/docs/res/preview-app.png new file mode 100644 index 000000000..4f0843a7a 
Binary files /dev/null and b/docs/res/preview-app.png differ diff --git a/docs/res/sample.png b/docs/res/sample.png new file mode 100644 index 000000000..f32ec5484 Binary files /dev/null and b/docs/res/sample.png differ diff --git a/docs/serve.sh b/docs/serve.sh new file mode 100755 index 000000000..c9cb7b6ef --- /dev/null +++ b/docs/serve.sh @@ -0,0 +1,20 @@ +#!/bin/sh +set -e +cd "$(dirname "$0")" + +if (which caddy >/dev/null); then + caddy_args=(\ + -host localhost \ + "bind localhost" \ + "mime .woff2 font/woff2" \ + "mime .woff application/font-woff" \ + ) + caddy "${caddy_args[@]}" +elif (which servedir >/dev/null); then + servedir +else + echo "Can not find 'caddy' nor 'servedir' in PATH." >&2 + echo "Install caddy from brew, apt or https://caddyserver.com/download" + echo "or install servedir with 'npm install -g secure-servedir'" + exit 1 +fi diff --git a/init.sh b/init.sh new file mode 100755 index 000000000..3141ea111 --- /dev/null +++ b/init.sh @@ -0,0 +1,289 @@ +#!/bin/bash + +SRCDIR=$(dirname "${BASH_SOURCE[0]}") +BUILD_DIR=$SRCDIR/build + +if [[ "${BUILD_DIR:0:2}" == "./" ]]; then + BUILD_DIR=${BUILD_DIR:2} +fi + +DIST_DIR=$BUILD_DIR/dist +BUILD_TMP_DIR=$BUILD_DIR/tmp +VENV_DIR=$BUILD_DIR/venv + +if [[ "${BASH_SOURCE[0]}" != "${0}" ]]; then + # sourced + if [[ -z $VIRTUAL_ENV ]] && [[ ! -f "$VENV_DIR/bin/activate" ]]; then + echo "Project not configured." >&2 + echo "Execute this script instead of sourcing it to perform setup." 
>&2 + else + source "$VENV_DIR/bin/activate" + pushd "$SRCDIR" >/dev/null + SRCDIR_ABS=$(pwd) + popd >/dev/null + export PYTHONPATH=$SRCDIR_ABS/misc/pylib + fi +else + # Subshell + set -e + cd "$SRCDIR" + + # ———————————————————————————————————————————————————————————————————————————————————————————————— + # virtualenv + + mkdir -p "$VENV_DIR" + + pushd "$(dirname "$VENV_DIR")" >/dev/null + VENV_DIR_ABS=$(pwd)/$(basename "$VENV_DIR") + popd >/dev/null + + # must check and set VENV_ACTIVE before polluting local env + VENV_ACTIVE=false + if [[ "$VIRTUAL_ENV" == "$VENV_DIR_ABS" ]] && [[ "$1" != "-force" ]]; then + VENV_ACTIVE=true + fi + + if ! (which virtualenv >/dev/null); then + echo "$0: Can't find virtualenv in PATH -- install through 'pip install --user virtualenv'" >&2 + exit 1 + fi + + if [[ ! -d "$VENV_DIR/bin" ]]; then + echo "Setting up virtualenv in '$VENV_DIR'" + virtualenv "$VENV_DIR" + else + if [[ ! -z $VIRTUAL_ENV ]] && [[ "$VIRTUAL_ENV" != "$VENV_DIR_ABS" ]]; then + echo "Looks like the repository has moved location -- updating virtualenv" + virtualenv "$VENV_DIR" + fi + fi + + source "$VENV_DIR/bin/activate" + + UPDATE_TIMESTAMP_FILE="$VENV_DIR/last-pip-run.mark" + REQUIREMENTS_FILE=$SRCDIR/requirements.txt + + if [ "$REQUIREMENTS_FILE" -nt "$UPDATE_TIMESTAMP_FILE" ]; then + echo "pip install -r $REQUIREMENTS_FILE" + pip install -r "$REQUIREMENTS_FILE" + date '+%s' > "$UPDATE_TIMESTAMP_FILE" + fi + + # ———————————————————————————————————————————————————————————————————————————————————————————————— + # deps + DEPS_DIR=$BUILD_DIR/deps + PATCH_DIR=$(pwd)/misc/patches + mkdir -p "$DEPS_DIR" + + check_dep() { + NAME=$1 + REPO_URL=$2 + BRANCH=$3 + TREE_REF=$4 + set -e + REPODIR=$DEPS_DIR/$NAME + if [[ ! -d "$REPODIR/.git" ]]; then + rm -rf "$REPODIR" + echo "Fetching $NAME from $REPO_URL" + if ! (git clone --recursive --single-branch -b $BRANCH -- "$REPO_URL" "$REPODIR"); then + exit 1 + fi + if [[ ! 
-z $TREE_REF ]]; then + git -C "$REPODIR" checkout "$TREE_REF" + git -C "$REPODIR" submodule update + fi + return 1 + fi + # TODO: check that source matches tree ref + return 0 + } + + if ! (check_dep \ + woff2 https://github.com/google/woff2.git master 36e6555b92a1519c927ebd43b79621810bf17c1a ) + then + echo "Building woff2" + git -C "$DEPS_DIR/woff2" apply "$PATCH_DIR/woff2.patch" + if !(make -C "$DEPS_DIR/woff2" -j8 clean all); then + rm -rf "$DEPS_DIR/woff2" + exit 1 + fi + fi + if [[ ! -f "$VENV_DIR/bin/woff2_compress" ]]; then + ln -vfs ../../deps/woff2/woff2_compress "$VENV_DIR/bin" + fi + + # EOT is disabled + # if ! (check_dep \ + # ttf2eot https://github.com/rsms/ttf2eot.git master ) + # then + # echo "Building ttf2eot" + # make -C "$DEPS_DIR/ttf2eot" clean all + # fi + # if [[ ! -f "$VENV_DIR/bin/ttf2eot" ]]; then + # ln -vfs ../../deps/ttf2eot/ttf2eot "$VENV_DIR/bin" + # fi + + if [[ ! -f "$DEPS_DIR/ttfautohint" ]]; then + URL=https://download.savannah.gnu.org/releases/freetype/ttfautohint-1.6-tty-osx.tar.gz + echo "Fetching $URL" + curl '-#' -o "$DEPS_DIR/ttfautohint.tar.gz" -L "$URL" + tar -C "$DEPS_DIR" -xzf "$DEPS_DIR/ttfautohint.tar.gz" + rm "$DEPS_DIR/ttfautohint.tar.gz" + fi + if [[ ! -f "$VENV_DIR/bin/ttfautohint" ]]; then + ln -vfs ../../deps/ttfautohint "$VENV_DIR/bin" + fi + + if [[ ! -f "$VENV_DIR/bin/ttf2woff" ]] || [[ ! -f "$SRCDIR/misc/ttf2woff/ttf2woff" ]]; then + echo "Building ttf2woff" + make -C "$SRCDIR/misc/ttf2woff" -j8 + fi + if [[ ! 
-f "$VENV_DIR/bin/ttf2woff" ]]; then + ln -vfs ../../../misc/ttf2woff/ttf2woff "$VENV_DIR/bin" + fi + + # ———————————————————————————————————————————————————————————————————————————————————————————————— + # $BUILD_TMP_DIR + # create and mount spare disk image needed on macOS to support case-sensitive filenames + if [[ "$(uname)" = *Darwin* ]]; then + bash misc/mac-tmp-disk-mount.sh + else + mkdir -p "$BUILD_TMP_DIR" + fi + + # ———————————————————————————————————————————————————————————————————————————————————————————————— + # $BUILD_DIR/etc/generated.make + master_styles=( \ + Regular \ + Bold \ + ) + derived_styles=( \ + "RegularItalic : Regular" \ + "Medium : Regular Bold" \ + "MediumItalic : Regular Bold" \ + "BoldItalic : Bold" \ + # "Black : Regular Bold" \ + # "BlackItalic : Regular Bold" \ + ) + web_formats=( woff woff2 ) # Disabled/unused: eot + + mkdir -p "$BUILD_DIR/etc" + GEN_MAKE_FILE=$BUILD_DIR/etc/generated.make + + # Only generate if there are changes to the font sources + NEED_GENERATE=false + if [[ ! 
-f "$GEN_MAKE_FILE" ]] || [[ "$0" -nt "$GEN_MAKE_FILE" ]]; then + NEED_GENERATE=true + else + for style in "${master_styles[@]}"; do + if $NEED_GENERATE; then + break + fi + for srcfile in $(find src/Interface-${style}.ufo -type f -newer "$GEN_MAKE_FILE"); do + NEED_GENERATE=true + break + done + done + fi + + if $NEED_GENERATE; then + echo "Generating '$GEN_MAKE_FILE'" + echo "# Generated by init.sh -- do not modify manually" > "$GEN_MAKE_FILE" + + all_styles=() + + for style in "${master_styles[@]}"; do + all_styles+=( $style ) + echo "${style}_ufo_d := " \ + "\$(wildcard src/Interface-${style}.ufo/* src/Interface-${style}.ufo/*/*)" >> "$GEN_MAKE_FILE" + echo "$BUILD_TMP_DIR/InterfaceTTF/Interface-${style}.ttf: \$(${style}_ufo_d)" >> "$GEN_MAKE_FILE" + echo "$BUILD_TMP_DIR/InterfaceOTF/Interface-${style}.otf: \$(${style}_ufo_d)" >> "$GEN_MAKE_FILE" + done + + for e in "${derived_styles[@]}"; do + style=$(echo "${e%%:*}" | xargs) + dependent_styles=$(echo "${e#*:}" | xargs) + all_styles+=( $style ) + + echo -n "$BUILD_TMP_DIR/InterfaceTTF/Interface-${style}.ttf:" >> "$GEN_MAKE_FILE" + for depstyle in $dependent_styles; do + echo -n " \$(${depstyle}_ufo_d)" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + + echo -n "$BUILD_TMP_DIR/InterfaceOTF/Interface-${style}.otf:" >> "$GEN_MAKE_FILE" + for depstyle in $dependent_styles; do + echo -n " \$(${depstyle}_ufo_d)" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + done + + # STYLE and STYLE_ttf targets + for style in "${all_styles[@]}"; do + echo "${style}_ttf: $DIST_DIR/Interface-${style}.ttf" >> "$GEN_MAKE_FILE" + echo "${style}_otf: $DIST_DIR-unhinted/Interface-${style}.otf" >> "$GEN_MAKE_FILE" + echo "${style}_ttf_unhinted: $DIST_DIR-unhinted/Interface-${style}.ttf" >> "$GEN_MAKE_FILE" + + echo -n "${style}: ${style}_otf" >> "$GEN_MAKE_FILE" + for format in "${web_formats[@]}"; do + echo -n " $DIST_DIR/Interface-${style}.${format}" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + + echo 
-n "${style}_unhinted: ${style}_otf" >> "$GEN_MAKE_FILE" + for format in "${web_formats[@]}"; do + echo -n " $DIST_DIR-unhinted/Interface-${style}.${format}" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + done + + # all_otf target + echo -n "all_otf:" >> "$GEN_MAKE_FILE" + for style in "${all_styles[@]}"; do + echo -n " ${style}_otf" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + + # all_ttf target + echo -n "all_ttf:" >> "$GEN_MAKE_FILE" + for style in "${all_styles[@]}"; do + echo -n " ${style}_ttf" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + + echo -n "all_ttf_unhinted:" >> "$GEN_MAKE_FILE" + for style in "${all_styles[@]}"; do + echo -n " ${style}_ttf_unhinted" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + + # all_web target + echo -n "all_web:" >> "$GEN_MAKE_FILE" + for style in "${all_styles[@]}"; do + echo -n " ${style}" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + + echo -n "all_web_unhinted:" >> "$GEN_MAKE_FILE" + for style in "${all_styles[@]}"; do + echo -n " ${style}_unhinted" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + + + echo -n ".PHONY: all_ttf all_ttf_unhinted all_web all_web_unhinted all_otf" >> "$GEN_MAKE_FILE" + for style in "${all_styles[@]}"; do + echo -n " ${style} ${style}_ttf ${style}_ttf_unhinted ${style}_otf" >> "$GEN_MAKE_FILE" + done + echo "" >> "$GEN_MAKE_FILE" + fi + + # ———————————————————————————————————————————————————————————————————————————————————————————————— + # summary + if ! 
$VENV_ACTIVE; then + echo "You now need to activate virtualenv by:" + echo " source '$0'" + echo "Or directly by sourcing the activate script:" + echo " source '$VENV_DIR/bin/activate'" + fi +fi diff --git a/misc/cleanup-kerning.py b/misc/cleanup-kerning.py new file mode 100755 index 000000000..03ddffefd --- /dev/null +++ b/misc/cleanup-kerning.py @@ -0,0 +1,353 @@ +#!/usr/bin/env python +# encoding: utf8 +from __future__ import print_function +import os, sys, plistlib, re +from collections import OrderedDict +from ConfigParser import RawConfigParser +from argparse import ArgumentParser +from fontTools import ttLib +from robofab.objects.objectsRF import OpenFont + + +# Regex matching "default" glyph names, like "uni2043" and "u01C5" +uniNameRe = re.compile(r'^u(?:ni)([0-9A-F]{4,8})$') + + +def unicodeForDefaultGlyphName(glyphName): + m = uniNameRe.match(glyphName) + if m is not None: + try: + return int(m.group(1), 16) + except: + pass + return None + + +def canonicalGlyphName(glyphName, uc2names): + uc = unicodeForDefaultGlyphName(glyphName) + if uc is not None: + names = uc2names.get(uc) + if names is not None and len(names) > 0: + return names[0] + return glyphName + + + +def parseGlyphComposition(composite): + c = composite.split("=") + d = c[1].split("/") + glyphName = d[0] + if len(d) == 1: + offset = [0, 0] + else: + offset = [int(i) for i in d[1].split(",")] + accentString = c[0] + accents = accentString.split("+") + baseName = accents.pop(0) + accentNames = [i.split(":") for i in accents] + return (glyphName, baseName, accentNames, offset) + + +def loadGlyphCompositions(filename): # { glyphName => (baseName, accentNames, offset) } + compositions = OrderedDict() + with open(filename, 'r') as f: + for line in f: + line = line.strip() + if len(line) > 0 and line[0] != '#': + glyphName, baseName, accentNames, offset = parseGlyphComposition(line) + compositions[glyphName] = (baseName, accentNames, offset) + return compositions + + +def loadAGL(filename): # -> { 
2126: 'Omega', ... } + m = {} + with open(filename, 'r') as f: + for line in f: + # Omega;2126 + # dalethatafpatah;05D3 05B2 # higher-level combinations; ignored + line = line.strip() + if len(line) > 0 and line[0] != '#': + name, uc = tuple([c.strip() for c in line.split(';')]) + if uc.find(' ') == -1: + # it's a 1:1 mapping + m[int(uc, 16)] = name + return m + + +def loadLocalNamesDB(fonts, agl, diacriticComps): + uc2names = None # { 2126: ['Omega', ...], ...} + allNames = set() # set('Omega', ...) + + for font in fonts: + _uc2names = font.getCharacterMapping() # { 2126: ['Omega', ...], ...} + if uc2names is None: + uc2names = _uc2names + else: + for uc, _names in _uc2names.iteritems(): + names = uc2names.setdefault(uc, []) + for name in _names: + if name not in names: + names.append(name) + for g in font: + allNames.add(g.name) + + # agl { 2126: 'Omega', ...} -> { 'Omega': [2126, ...], ...} + aglName2Ucs = {} + for uc, name in agl.iteritems(): + aglName2Ucs.setdefault(name, []).append(uc) + + for glyphName, comp in diacriticComps.iteritems(): + aglUCs = aglName2Ucs.get(glyphName) + if aglUCs is None: + uc = unicodeForDefaultGlyphName(glyphName) + if uc is not None: + glyphName2 = agl.get(uc) + if glyphName2 is not None: + glyphName = glyphName2 + names = uc2names.setdefault(uc, []) + if glyphName not in names: + names.append(glyphName) + allNames.add(glyphName) + else: + allNames.add(glyphName) + for uc in aglUCs: + names = uc2names.get(uc, []) + if glyphName not in names: + names.append(glyphName) + uc2names[uc] = names + + name2ucs = {} # { 'Omega': [2126, ...], ...} + for uc, names in uc2names.iteritems(): + for name in names: + name2ucs.setdefault(name, set()).add(uc) + + return uc2names, name2ucs, allNames + + +# def getNameToGroupsMap(groups): # => { glyphName => set(groupName) } +# nameMap = {} +# for groupName, glyphNames in groups.iteritems(): +# for glyphName in glyphNames: +# nameMap.setdefault(glyphName, set()).add(groupName) +# return nameMap + + +# 
def inspectKerning(kerning): +# leftIndex = {} # { glyph-name => } +# rightIndex = {} # { glyph-name => [(left-hand-side-name, kernVal), ...] } +# rightGroupIndex = {} # { group-name => [(left-hand-side-name, kernVal), ...] } +# for leftName, right in kerning.iteritems(): +# if leftName[0] != '@': +# leftIndex[leftName] = right +# for rightName, kernVal in right.iteritems(): +# if rightName[0] != '@': +# rightIndex.setdefault(rightName, []).append((leftName, kernVal)) +# else: +# rightGroupIndex.setdefault(rightName, []).append((leftName, kernVal)) +# return leftIndex, rightIndex, rightGroupIndex + + +class RefTracker: + def __init__(self): + self.refs = {} + + def incr(self, name): + self.refs[name] = self.refs.get(name, 0) + 1 + + def decr(self, name): # => bool hasNoRefs + r = self.refs.get(name) + + if r is None: + raise Exception('decr untracked ref ' + repr(name)) + + if r < 1: + raise Exception('decr already zero ref ' + repr(name)) + + if r == 1: + del self.refs[name] + return True + + self.refs[name] = r - 1 + + def __contains__(self, name): + return name in self.refs + + +def main(): + argparser = ArgumentParser(description='Remove unused kerning') + + argparser.add_argument( + '-dry', dest='dryRun', action='store_const', const=True, default=False, + help='Do not modify anything, but instead just print what would happen.') + + argparser.add_argument( + 'fontPaths', metavar='', type=str, nargs='+', help='UFO fonts to update') + + args = argparser.parse_args() + dryRun = args.dryRun + + agl = loadAGL('src/glyphlist.txt') # { 2126: 'Omega', ... 
}
+  diacriticComps = loadGlyphCompositions('src/diacritics.txt') # {glyphName => (baseName, a, o)}
+
+  for fontPath in args.fontPaths:
+    print(fontPath)
+
+    groupsFilename = os.path.join(fontPath, 'groups.plist')
+    kerningFilename = os.path.join(fontPath, 'kerning.plist')
+
+    groups = plistlib.readPlist(groupsFilename) # { groupName => [glyphName] }
+    kerning = plistlib.readPlist(kerningFilename) # { leftName => {rightName => kernVal} }
+
+    font = OpenFont(fontPath)
+    uc2names, name2ucs, allNames = loadLocalNamesDB([font], agl, diacriticComps)
+
+    # start with eliminating non-existent glyphs from groups and completely
+    # eliminate groups with all-dead glyphs.
+    eliminatedGroups = set()
+    for groupName, glyphNames in list(groups.items()):
+      glyphNames2 = []
+      for name in glyphNames:
+        if name in allNames:
+          glyphNames2.append(name)
+        else:
+          name2 = canonicalGlyphName(name, uc2names)
+          if name2 != name and name2 in allNames:
+            print('group: rename glyph', name, '->', name2)
+            glyphNames2.append(name2)
+
+      if len(glyphNames2) == 0:
+        print('group: eliminate', groupName)
+        eliminatedGroups.add(groupName)
+        del groups[groupName]
+      elif len(glyphNames2) != len(glyphNames):
+        print('group: shrink', groupName)
+        groups[groupName] = glyphNames2
+
+    # now eliminate kerning
+    groupRefs = RefTracker() # tracks group references, so we can eliminate unreachable ones
+
+    for leftName, right in list(kerning.items()):
+      leftIsGroup = leftName[0] == '@'
+
+      if leftIsGroup:
+        if leftName in eliminatedGroups:
+          print('kerning: eliminate LHS', leftName)
+          del kerning[leftName]
+          continue
+        groupRefs.incr(leftName)
+      else:
+        if leftName not in allNames:
+          print('kerning: eliminate LHS', leftName)
+          del kerning[leftName]
+          continue
+
+      right2 = {}
+      for rightName, kernVal in right.iteritems():
+        rightIsGroup = rightName[0] == '@'
+        if rightIsGroup:
+          # FIX(review): was `if rightIsGroup in eliminatedGroups:` — that compares the
+          # boolean flag (always True on this branch) against a set of group-name
+          # strings, so it never matched and RHS references to eliminated groups were
+          # silently kept. Test the group name itself.
+          if rightName in eliminatedGroups:
+            print('kerning: eliminate RHS group', rightName)
+          else:
+            groupRefs.incr(rightName)
+ 
right2[rightName] = kernVal + else: + if rightName not in allNames: + # maybe an unnamed glyph? + rightName2 = canonicalGlyphName(rightName, uc2names) + if rightName2 != rightName: + print('kerning: rename & update RHS glyph', rightName, '->', rightName2) + right2[rightName2] = kernVal + else: + print('kerning: eliminate RHS glyph', rightName) + else: + right2[rightName] = kernVal + + if len(right2) == 0: + print('kerning: eliminate LHS', leftName) + del kerning[leftName] + if leftIsGroup: + groupRefs.decr(leftName) + else: + kerning[leftName] = right2 + + # eliminate any unreferenced groups + for groupName, glyphNames in list(groups.items()): + if not groupName in groupRefs: + print('group: eliminate unreferenced group', groupName) + del groups[groupName] + + + # verify that there are no conflicting kerning pairs + pairs = {} # { key => [...] } + conflictingPairs = set() + + for leftName, right in kerning.iteritems(): + # expand LHS group -> names + topLeftName = leftName + for leftName in groups[leftName] if leftName[0] == '@' else [leftName]: + if leftName not in allNames: + raise Exception('unknown LHS glyph name ' + repr(leftName)) + keyPrefix = leftName + '+' + for rightName, kernVal in right.iteritems(): + # expand RHS group -> names + topRightName = rightName + for rightName in groups[rightName] if rightName[0] == '@' else [rightName]: + if rightName not in allNames: + raise Exception('unknown RHS glyph name ' + repr(rightName)) + # print(leftName, '+', rightName, '=>', kernVal) + key = keyPrefix + rightName + isConflict = key in pairs + pairs.setdefault(key, []).append(( topLeftName, topRightName, kernVal )) + if isConflict: + conflictingPairs.add(key) + + # # resolve pair conflicts by preferring pairs defined via group kerning + # for key in conflictingPairs: + # pairs = pairs[key] + # print('kerning: conflicting pairs %r: %r' % (key, pairs)) + # bestPair = None + # redundantPairs = [] + # for pair in pairs: + # leftName, rightName, kernVal = pair + # if 
bestPair is None: + # bestPair = pair + # else: + # bestLeftName, bestRightName, _ = bestPair + # bestScore = 0 + # score = 0 + # if bestLeftName[0] == '@': bestScore += 1 + # if bestRightName[0] == '@': bestScore += 1 + # if leftName[0] == '@': score += 1 + # if rightName[0] == '@': score += 1 + # if bestScore == 2: + # # doesn't get better than this + # break + # elif score > bestScore: + # redundantPairs.append(bestPair) + # bestPair = pair + # else: + # redundantPairs.append(pair) + # print('- keeping', bestPair) + # print('- eliminating', redundantPairs) + # for redundantPairs + + + # # eliminate any unreferenced groups + # for groupName, glyphNames in list(groups.items()): + # if not groupName in groupRefs: + # print('group: eliminate unreferenced group', groupName) + # del groups[groupName] + + + print('Write', groupsFilename) + if not dryRun: + plistlib.writePlist(groups, groupsFilename) + + print('Write', kerningFilename) + if not dryRun: + plistlib.writePlist(kerning, kerningFilename) + + # [end] for fontPath in args.fontPaths + + +main() diff --git a/misc/doc/install-mac.txt b/misc/doc/install-mac.txt new file mode 100644 index 000000000..b3aae815e --- /dev/null +++ b/misc/doc/install-mac.txt @@ -0,0 +1,24 @@ + +Installing on macOS: + +1. Open the "Interface (OTF)" folder +2. Select all font files +3. Right-click (or ctrl-click) the selected files + and choose "Open with..." → "Font Book" +4. Press the "Install" button + +If you get any errors, like Font Book saying there're duplicate fonts, +cancel the installation and instead try the instructions below: + + +Installing on macOS, manually: + +1. Copy the "Interface (OTF)" folder +2. Press cmd-shift-G in Finder +3. Enter "~/Library/Fonts" into the dialog that shows up and press RETURN. +4. Paste the "Interface (OTF)" folder. + +If you have a previous installation of Interface, you should make sure to +remove those fonts files before installing new ones. 
+ +See https://github.com/rsms/interface for more information diff --git a/misc/doc/install-win.txt b/misc/doc/install-win.txt new file mode 100644 index 000000000..ed9dbf8b7 --- /dev/null +++ b/misc/doc/install-win.txt @@ -0,0 +1,19 @@ + +Installing on Windows 10: + +1. Open the "Interface (hinted TTF)" folder +2. Select all font files +3. Right-click the selected files and choose "Install" + + +Installing on Windows 10, manually: + +1. Double-click the downloaded zip file +2. Copy the "Interface (hinted TTF)" folder +3. Press Win-Q on your keyboard, then type "fonts" and hit ENTER +4. Paste the "Interface (hinted TTF)" folder. + +If you have a previous installation of Interface, you should make sure +to remove those fonts files before installing new ones. + +See https://github.com/rsms/interface for more information diff --git a/misc/e-alt-straight-close.glif b/misc/e-alt-straight-close.glif new file mode 100644 index 000000000..28266ecb6 --- /dev/null +++ b/misc/e-alt-straight-close.glif @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/misc/enrich-glypnames.py b/misc/enrich-glypnames.py new file mode 100755 index 000000000..b4c401217 --- /dev/null +++ b/misc/enrich-glypnames.py @@ -0,0 +1,650 @@ +#!/usr/bin/env python +# encoding: utf8 +from __future__ import print_function +import os +import sys +import argparse +import json +import plistlib +import re +from collections import OrderedDict +from textwrap import TextWrapper +from StringIO import StringIO +from ConfigParser import RawConfigParser +from fontTools import ttLib +from robofab.objects.objectsRF import RFont, OpenFont + +# from feaTools import parser as feaParser +# from feaTools.parser import parseFeatures +# from feaTools import FDKSyntaxFeatureWriter +# from fontbuild.features import updateFeature, compileFeatureRE + +# Regex matching "default" glyph names, like "uni2043" and "u01C5" +uniNameRe = re.compile(r'^u(?:ni)[0-9A-F]{4,8}$') + + +def 
defaultGlyphName(uc): + return 'uni%04X' % uc + +def defaultGlyphName2(uc): + return 'u%04X' % uc + + +def isDefaultGlyphName(name): + return True if uniNameRe.match(name) else False + + +def isDefaultGlyphNameForUnicode(name, uc): + return name == defaultGlyphName(uc) or name == defaultGlyphName2(uc) + + +def getFirstNonDefaultGlyphName(uc, names): + for name in names: + if not isDefaultGlyphNameForUnicode(name, uc): + return name + return None + + +def getTTGlyphList(font): # -> { 'Omega': [2126, ...], ... } + if isinstance(font, str): + font = ttLib.TTFont(font) + + if not 'cmap' in font: + raise Exception('missing cmap table') + + gl = {} + bestCodeSubTable = None + bestCodeSubTableFormat = 0 + + for st in font['cmap'].tables: + if st.platformID == 0: # 0=unicode, 1=mac, 2=(reserved), 3=microsoft + if st.format > bestCodeSubTableFormat: + bestCodeSubTable = st + bestCodeSubTableFormat = st.format + + if bestCodeSubTable is not None: + for cp, glyphname in bestCodeSubTable.cmap.items(): + if glyphname in gl: + gl[glyphname].append(cp) + else: + gl[glyphname] = [cp] + + return gl, font + + +def getUFOGlyphList(font): # -> { 'Omega': [2126, ...], ... 
} + # Note: font.getCharacterMapping() returns {2126:['Omega', ...], ...} + gl = {} + for g in font: + ucv = g.unicodes + if len(ucv) > 0: + gl[g.name] = ucv + return gl + + +def appendNames(uc2names, extraUc2names, uc, name, isDestination): + if uc in uc2names: + names = uc2names[uc] + if name not in names: + names.append(name) + elif isDestination: + uc2names[uc] = [name] + else: + if uc in extraUc2names: + names = extraUc2names[uc] + if name not in names: + names.append(name) + else: + extraUc2names[uc] = [name] + + +def buildGlyphNames(dstFonts, srcFonts, glyphOrder, fallbackGlyphNames): + # fallbackGlyphNames: { 2126: 'Omega', ...} + uc2names = {} # { 2126: ['Omega', 'Omegagreek', ...], ...} + extraUc2names = {} # { 2126: ['Omega', 'Omegagreek', ...], ...} + # -- codepoints in Nth fonts, not found in first font + name2ucsv = [] # [ { 'Omega': [2126, ...] }, ... ] -- same order as fonts + + fontIndex = 0 + for font in dstFonts + srcFonts: + gl = None + if isinstance(font, RFont): + print('Inspecting', font.info.familyName, font.info.styleName) + gl = getUFOGlyphList(font) + else: + print('Inspecting', font) + gl, font = getTTGlyphList(font) + + name2ucsv.append(gl) + + isDestination = fontIndex < len(dstFonts) + + for name, unicodes in gl.iteritems(): + # if len(uc2names) > 100: break + for uc in unicodes: + appendNames(uc2names, extraUc2names, uc, name, isDestination) + if isDestination: + fallbackName = fallbackGlyphNames.get(uc) + if fallbackName is not None: + appendNames(uc2names, extraUc2names, uc, fallbackName, isDestination) + + fontIndex += 1 + + # for name in glyphOrder: + # if len(name) > 7 and name.startswith('uni') and name.find('.') == -1 and name.find('_') == -1: + # try: + # print('name: %r, %r' % (name, name[3:])) + # uc = int(name[3:], 16) + # appendNames(uc2names, extraUc2names, uc, name, isDestination=True) + # except: + # print() + # pass + + return uc2names, extraUc2names, name2ucsv + + +def renameStrings(listofstrs, newNames): + v = [] + 
for s in listofstrs:
+    s2 = newNames.get(s)
+    if s2 is not None:
+      s = s2
+    v.append(s)
+  return v
+
+
+def renameUFOLib(ufoPath, newNames, dryRun=False, print=print):
+  filename = os.path.join(ufoPath, 'lib.plist')
+  plist = plistlib.readPlist(filename)
+
+  glyphOrder = plist.get('public.glyphOrder')
+  if glyphOrder is not None:
+    plist['public.glyphOrder'] = renameStrings(glyphOrder, newNames)
+
+  roboSort = plist.get('com.typemytype.robofont.sort')
+  if roboSort is not None:
+    for entry in roboSort:
+      if isinstance(entry, dict) and entry.get('type') == 'glyphList':
+        asc = entry.get('ascending')
+        desc = entry.get('descending')
+        if asc is not None:
+          entry['ascending'] = renameStrings(asc, newNames)
+        if desc is not None:
+          entry['descending'] = renameStrings(desc, newNames)
+
+  print('Writing', filename)
+  if not dryRun:
+    plistlib.writePlist(plist, filename)
+
+
+def renameUFOGroups(ufoPath, newNames, dryRun=False, print=print):
+  filename = os.path.join(ufoPath, 'groups.plist')
+
+  plist = None
+  try:
+    plist = plistlib.readPlist(filename)
+  except:
+    return
+
+  didChange = False
+
+  for groupName, glyphNames in plist.items():
+    for i in range(len(glyphNames)):
+      name = glyphNames[i]
+      if name in newNames:
+        didChange = True
+        glyphNames[i] = newNames[name]
+
+  if didChange:
+    print('Writing', filename)
+    if not dryRun:
+      plistlib.writePlist(plist, filename)
+
+
+def renameUFOKerning(ufoPath, newNames, dryRun=False, print=print):
+  filename = os.path.join(ufoPath, 'kerning.plist')
+
+  plist = None
+  try:
+    plist = plistlib.readPlist(filename)
+  except:
+    return
+
+  didChange = False
+
+  newPlist = {}
+  for leftName, right in plist.items():
+    if leftName in newNames:
+      didChange = True
+      leftName = newNames[leftName]
+    newRight = {}
+    # FIX(review): the inner loop iterated `plist.items()` (the whole kerning table,
+    # yielding (leftName, dict) pairs) instead of `right.items()` (this LHS entry's
+    # {rightName: kernValue} pairs), and the result was stored as
+    # `newPlist[leftName] = right`, discarding the renamed `newRight` entirely —
+    # so RHS glyph renames never reached the written plist.
+    for rightName, kernValue in right.items():
+      if rightName in newNames:
+        didChange = True
+        rightName = newNames[rightName]
+      newRight[rightName] = kernValue
+    newPlist[leftName] = newRight
+
+  if didChange:
+    print('Writing', 
filename) + if not dryRun: + plistlib.writePlist(newPlist, filename) + + +def subFeaName(m, newNames, state): + try: + int(m[3], 16) + except: + return m[0] + + name = m[2] + + if name in newNames: + # print('sub %r => %r' % (m[0], m[1] + newNames[name] + m[4])) + if name == 'uni0402': + print('sub %r => %r' % (m[0], m[1] + newNames[name] + m[4])) + state['didChange'] = True + return m[1] + newNames[name] + m[4] + + return m[0] + + +FEA_TOK = 'tok' +FEA_SEP = 'sep' +FEA_END = 'end' + +def feaTokenizer(feaText): + separators = set('; \t\r\n,[]\'"') + tokStartIndex = -1 + sepStartIndex = -1 + + for i in xrange(len(feaText)): + ch = feaText[i] + if ch in separators: + if tokStartIndex != -1: + yield (FEA_TOK, feaText[tokStartIndex:i]) + tokStartIndex = -1 + if sepStartIndex == -1: + sepStartIndex = i + else: + if sepStartIndex != -1: + yield (FEA_SEP, feaText[sepStartIndex:i]) + sepStartIndex = -1 + if tokStartIndex == -1: + tokStartIndex = i + + if sepStartIndex != -1 and tokStartIndex != -1: + yield (FEA_END, feaText[min(sepStartIndex, tokStartIndex):]) + elif sepStartIndex != -1: + yield (FEA_END, feaText[sepStartIndex:]) + elif tokStartIndex != -1: + yield (FEA_END, feaText[tokStartIndex:]) + else: + yield (FEA_END, '') + + +def renameUFOFeatures(font, ufoPath, newNames, dryRun=False, print=print): + filename = os.path.join(ufoPath, 'features.fea') + + feaText = '' + try: + with open(filename, 'r') as f: + feaText = f.read() + except: + return + + didChange = False + feaText2 = '' + + for t, v in feaTokenizer(feaText): + if t is FEA_TOK and len(v) > 6 and v.startswith('uni'): + if v in newNames: + # print('sub', v, newNames[v]) + didChange = True + v = newNames[v] + feaText2 += v + + feaText = feaText2 + + if didChange: + print('Writing', filename) + if not dryRun: + with open(filename, 'w') as f: + f.write(feaText) + print( + 'Important: you need to manually verify that', filename, 'looks okay.', + 'We did an optimistic update which is not perfect.' 
+ ) + + # classes = feaParser.classDefinitionRE.findall(feaText) + # for precedingMark, className, classContent in classes: + # content = feaParser.classContentRE.findall(classContent) + # print('class', className, content) + + # didChange = False + # content2 = [] + # for name in content: + # if name in newNames: + # didChange = True + # content2.append(newNames[name]) + # if didChange: + # print('content2', content2) + # feaText = feaParser.classDefinitionRE.sub('', feaText) + + # featureTags = feaParser.feature_findAll_RE.findall(feaText) + # for precedingMark, featureTag in featureTags: + # print('feat', featureTag) + + +def renameUFODetails(font, ufoPath, newNames, dryRun=False, print=print): + renameUFOLib(ufoPath, newNames, dryRun, print) + renameUFOGroups(ufoPath, newNames, dryRun, print) + renameUFOKerning(ufoPath, newNames, dryRun, print) + renameUFOFeatures(font, ufoPath, newNames, dryRun, print) + + +def readLines(filename): + with open(filename, 'r') as f: + return f.read().strip().splitlines() + + +def readGlyphOrderFile(filename): + names = [] + for line in readLines(filename): + line = line.lstrip() + if len(line) > 0 and line[0] != '#': + names.append(line) + return names + + +def renameGlyphOrderFile(filename, newNames, dryRun=False, print=print): + lines = [] + didRename = False + for line in readLines(filename): + line = line.lstrip() + if len(line) > 0 and line[0] != '#': + newName = newNames.get(line) + if newName is not None: + didRename = True + line = newName + lines.append(line) + if didRename: + print('Writing', filename) + if not dryRun: + with open(filename, 'w') as f: + f.write('\n'.join(lines)) + + +def parseGlyphComposition(composite): + c = composite.split("=") + d = c[1].split("/") + glyphName = d[0] + if len(d) == 1: + offset = [0, 0] + else: + offset = [int(i) for i in d[1].split(",")] + accentString = c[0] + accents = accentString.split("+") + baseName = accents.pop(0) + accentNames = [i.split(":") for i in accents] + return 
(glyphName, baseName, accentNames, offset) + + +def fmtGlyphComposition(glyphName, baseName, accentNames, offset): + # glyphName = 'uni03D3' + # baseName = 'uni03D2' + # accentNames = [['tonos', 'top'], ['acute', 'top']] + # offset = [100, 0] + # => "uni03D2+tonos:top+acute:top=uni03D3/100,0" + s = baseName + for accentNameTuple in accentNames: + s += '+' + accentNameTuple[0] + if len(accentNameTuple) > 1: + s += ':' + accentNameTuple[1] + s += '=' + glyphName + if offset[0] != 0 or offset[1] != 0: + s += '/%d,%d' % tuple(offset) + return s + + +def renameDiacriticsFile(filename, newNames, dryRun=False, print=print): + lines = [] + didRename = False + for line in readLines(filename): + line = line.strip() + if len(line) > 0 and line[0] != '#': + glyphName, baseName, accentNames, offset = parseGlyphComposition(line) + + # rename + glyphName = newNames.get(glyphName, glyphName) + baseName = newNames.get(baseName, baseName) + for accentTuple in accentNames: + accentTuple[0] = newNames.get(accentTuple[0], accentTuple[0]) + + line2 = fmtGlyphComposition(glyphName, baseName, accentNames, offset) + + if line != line2: + line = line2 + didRename = True + # print(line, '=>', line2) + + lines.append(line) + + if didRename: + print('Writing', filename) + if not dryRun: + with open(filename, 'w') as f: + f.write('\n'.join(lines)) + + +def configFindResFile(config, basedir, name): + fn = os.path.join(basedir, config.get("res", name)) + if not os.path.isfile(fn): + basedir = os.path.dirname(basedir) + fn = os.path.join(basedir, config.get("res", name)) + if not os.path.isfile(fn): + fn = None + return fn + + +def renameConfigFile(config, filename, newNames, dryRun=False, print=print): + wrapper = TextWrapper() + wrapper.width = 80 + wrapper.break_long_words = False + wrapper.break_on_hyphens = False + + wrap = lambda names: '\n'.join(wrapper.wrap(' '.join(names))) + + didRename = False + for propertyName, values in config.items('glyphs'): + glyphNames = values.split() + # 
print(propertyName, glyphNames)
+    propChanged = False
+    for name in glyphNames:
+      if name in newNames:
+        # FIX(review): was `sectionChanged = True` followed by `if sectionChanged:` —
+        # the flag assigned was never the flag initialized (`propChanged`), giving a
+        # NameError on the first property with no renames and a stale value after.
+        # Use one flag consistently.
+        propChanged = True
+    if propChanged:
+      config.set('glyphs', propertyName, wrap(glyphNames)+'\n')
+      didRename = True
+
+  # config.set(section, option, value)
+  if didRename:
+    s = StringIO()
+    config.write(s)
+    s = s.getvalue()
+    s = re.sub(r'\n(\w+)\s+=\s*', '\n\\1: ', s, flags=re.M)
+    s = re.sub(r'((?:^|\n)\[[^\]]*\])', '\\1\n', s, flags=re.M)
+    s = re.sub(r'\n\t\n', '\n\n', s, flags=re.M)
+    s = s.strip() + '\n'
+    print('Writing', filename)
+    if not dryRun:
+      with open(filename, 'w') as f:
+        f.write(s)
+
+
+def parseAGL(filename): # -> { 2126: 'Omega', ... }
+  m = {}
+  for line in readLines(filename):
+    # Omega;2126
+    # dalethatafpatah;05D3 05B2 # higher-level combinations; ignored
+    line = line.strip()
+    if len(line) > 0 and line[0] != '#':
+      name, uc = tuple([c.strip() for c in line.split(';')])
+      if uc.find(' ') == -1:
+        # it's a 1:1 mapping
+        m[int(uc, 16)] = name
+  return m
+
+
+def main():
+  argparser = argparse.ArgumentParser(description='Enrich UFO glyphnames')
+
+  argparser.add_argument(
+    '-dry', dest='dryRun', action='store_const', const=True, default=False,
+    help='Do not modify anything, but instead just print what would happen.')
+
+  argparser.add_argument(
+    '-list-missing', dest='listMissing', action='store_const', const=True, default=False,
+    help='List glyphs with unicodes found in source files but missing in any of the target UFOs.')
+
+  argparser.add_argument(
+    '-list-unnamed', dest='listUnnamed', action='store_const', const=True, default=False,
+    help="List glyphs with unicodes in target UFOs that don't have symbolic names.")
+
+  argparser.add_argument(
+    '-backfill-agl', dest='backfillWithAgl', action='store_const', const=True, default=False,
+    help="Use glyphnames from Adobe Glyph List for any glyphs that no names in any of"+
+    " the input font files")
+
+  argparser.add_argument(
+    '-src', dest='srcFonts', metavar='', type=str, 
nargs='*', + help='TrueType, OpenType or UFO fonts to gather glyph info from. '+ + 'Names found in earlier-listed fonts are prioritized over later listings.') + + argparser.add_argument( + 'dstFonts', metavar='', type=str, nargs='+', help='UFO fonts to update') + + args = argparser.parse_args() + + # Load UFO fonts + dstFonts = [] + dstFontPaths = {} # keyed by RFont object + srcDir = None + for fn in args.dstFonts: + fn = fn.rstrip('/') + font = OpenFont(fn) + dstFonts.append(font) + dstFontPaths[font] = fn + srcDir2 = os.path.dirname(fn) + if srcDir is None: + srcDir = srcDir2 + elif srcDir != srcDir2: + raise Exception('All s must be rooted in same directory') + + # load fontbuild configuration + config = RawConfigParser(dict_type=OrderedDict) + configFilename = os.path.join(srcDir, 'fontbuild.cfg') + config.read(configFilename) + glyphOrderFile = configFindResFile(config, srcDir, 'glyphorder') + diacriticsFile = configFindResFile(config, srcDir, 'diacriticfile') + glyphOrder = readGlyphOrderFile(glyphOrderFile) + + fallbackGlyphNames = {} # { 2126: 'Omega', ... 
} + if args.backfillWithAgl: + fallbackGlyphNames = parseAGL(configFindResFile(config, srcDir, 'agl_glyphlistfile')) + + # find glyph names + uc2names, extraUc2names, name2ucsv = buildGlyphNames( + dstFonts, + args.srcFonts, + glyphOrder, + fallbackGlyphNames + ) + # Note: name2ucsv has same order as parameters to buildGlyphNames + + if args.listMissing: + print('# Missing glyphs: (found in -src but not in any )') + for uc, names in extraUc2names.iteritems(): + print('U+%04X\t%s' % (uc, ', '.join(names))) + return + + elif args.listUnnamed: + print('# Unnamed glyphs:') + unnamed = set() + for name in glyphOrder: + if len(name) > 7 and name.startswith('uni'): + unnamed.add(name) + for gl in name2ucsv[:len(dstFonts)]: + for name, ucs in gl.iteritems(): + for uc in ucs: + if isDefaultGlyphNameForUnicode(name, uc): + unnamed.add(name) + break + for name in unnamed: + print(name) + return + + printDry = lambda *args: print(*args) + if args.dryRun: + printDry = lambda *args: print('[dry-run]', *args) + + newNames = {} + renameGlyphsQueue = {} # keyed by RFont object + + for font in dstFonts: + renameGlyphsQueue[font] = {} + + for uc, names in uc2names.iteritems(): + if len(names) < 2: + continue + dstGlyphName = names[0] + if isDefaultGlyphNameForUnicode(dstGlyphName, uc): + newGlyphName = getFirstNonDefaultGlyphName(uc, names[1:]) + # if newGlyphName is None: + # # if we found no symbolic name, check in fallback list + # newGlyphName = fallbackGlyphNames.get(uc) + # if newGlyphName is not None: + # printDry('Using fallback %s' % newGlyphName) + if newGlyphName is not None: + printDry('Rename %s -> %s' % (dstGlyphName, newGlyphName)) + for font in dstFonts: + if dstGlyphName in font: + renameGlyphsQueue[font][dstGlyphName] = newGlyphName + newNames[dstGlyphName] = newGlyphName + + if len(newNames) == 0: + printDry('No changes') + return + + # rename component instances + for font in dstFonts: + componentMap = font.getReverseComponentMapping() + for currName, newName in 
renameGlyphsQueue[font].iteritems(): + for depName in componentMap.get(currName, []): + depG = font[depName] + for c in depG.components: + if c.baseGlyph == currName: + c.baseGlyph = newName + c.setChanged() + + # rename glyphs + for font in dstFonts: + for currName, newName in renameGlyphsQueue[font].iteritems(): + font[currName].name = newName + + # save fonts and update font data + for font in dstFonts: + fontPath = dstFontPaths[font] + printDry('Saving %d glyphs in %s' % (len(newNames), fontPath)) + if not args.dryRun: + font.save() + renameUFODetails(font, fontPath, newNames, dryRun=args.dryRun, print=printDry) + + # update resource files + renameGlyphOrderFile(glyphOrderFile, newNames, dryRun=args.dryRun, print=printDry) + renameDiacriticsFile(diacriticsFile, newNames, dryRun=args.dryRun, print=printDry) + renameConfigFile(config, configFilename, newNames, dryRun=args.dryRun, print=printDry) + + +if __name__ == '__main__': + main() diff --git a/misc/fixup-diacritics.py b/misc/fixup-diacritics.py new file mode 100755 index 000000000..2453e7f3c --- /dev/null +++ b/misc/fixup-diacritics.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python +# encoding: utf8 +from __future__ import print_function +import os, sys, plistlib, re +from collections import OrderedDict +from ConfigParser import RawConfigParser +from argparse import ArgumentParser +from robofab.objects.objectsRF import OpenFont + + +# Regex matching "default" glyph names, like "uni2043" and "u01C5" +uniNameRe = re.compile(r'^u(?:ni)([0-9A-F]{4,8})$') + + +def unicodeForDefaultGlyphName(glyphName): + m = uniNameRe.match(glyphName) + if m is not None: + try: + return int(m.group(1), 16) + except: + pass + return None + + +def canonicalGlyphName(glyphName, uc2names): + uc = unicodeForDefaultGlyphName(glyphName) + if uc is not None: + names = uc2names.get(uc) + if names is not None and len(names) > 0: + return names[0] + return glyphName + + + +def parseGlyphComposition(composite): + c = composite.split("=") + d = 
c[1].split("/") + glyphName = d[0] + if len(d) == 1: + offset = [0, 0] + else: + offset = [int(i) for i in d[1].split(",")] + accentString = c[0] + accents = accentString.split("+") + baseName = accents.pop(0) + accentNames = [i.split(":") for i in accents] + return (glyphName, baseName, accentNames, offset) + + +def fmtGlyphComposition(glyphName, baseName, accentNames, offset): + # glyphName = 'uni03D3' + # baseName = 'uni03D2' + # accentNames = [['tonos', 'top'], ['acute', 'top']] + # offset = [100, 0] + # => "uni03D2+tonos:top+acute:top=uni03D3/100,0" + s = baseName + for accentNameTuple in accentNames: + s += '+' + accentNameTuple[0] + if len(accentNameTuple) > 1: + s += ':' + accentNameTuple[1] + s += '=' + glyphName + if offset[0] != 0 or offset[1] != 0: + s += '/%d,%d' % tuple(offset) + return s + + +def loadGlyphCompositions(filename): # { glyphName => (baseName, accentNames, offset) } + compositions = OrderedDict() + with open(filename, 'r') as f: + for line in f: + line = line.strip() + if len(line) > 0 and line[0] != '#': + glyphName, baseName, accentNames, offset = parseGlyphComposition(line) + compositions[glyphName] = (baseName, accentNames, offset) + return compositions + + +def loadAGL(filename): # -> { 2126: 'Omega', ... 
} + m = {} + with open(filename, 'r') as f: + for line in f: + # Omega;2126 + # dalethatafpatah;05D3 05B2 # higher-level combinations; ignored + line = line.strip() + if len(line) > 0 and line[0] != '#': + name, uc = tuple([c.strip() for c in line.split(';')]) + if uc.find(' ') == -1: + # it's a 1:1 mapping + m[int(uc, 16)] = name + return m + + +def loadFontGlyphs(font): + uc2names = {} # { 2126: ['Omega', ...], ...} + name2ucs = {} # { 'Omega': [2126, ...], '.notdef': [], ...} + for g in font: + name = g.name + ucs = g.unicodes + name2ucs[name] = ucs + for uc in ucs: + names = uc2names.setdefault(uc, []) + if name not in names: + names.append(name) + return uc2names, name2ucs + + +def main(): + argparser = ArgumentParser(description='Fixup diacritic names') + + argparser.add_argument( + '-dry', dest='dryRun', action='store_const', const=True, default=False, + help='Do not modify anything, but instead just print what would happen.') + + argparser.add_argument( + 'fontPaths', metavar='', type=str, nargs='+', help='UFO fonts') + + args = argparser.parse_args() + dryRun = args.dryRun + + uc2names = {} + name2ucs = {} + + for fontPath in args.fontPaths: + font = OpenFont(fontPath) + _uc2names, _name2ucs = loadFontGlyphs(font) + for uc, _names in _uc2names.iteritems(): + names = uc2names.setdefault(uc, []) + for name in _names: + if name not in names: + names.append(name) + for name, _ucs in _name2ucs.iteritems(): + ucs = name2ucs.setdefault(name, []) + for uc in _ucs: + if uc not in ucs: + ucs.append(uc) + + agl = loadAGL('src/glyphlist.txt') # { 2126: 'Omega', ... 
} + + diacriticsFilename = 'src/diacritics.txt' + diacriticComps = loadGlyphCompositions(diacriticsFilename) # {glyphName => (baseName, a, o)} + + for glyphName, comp in list(diacriticComps.items()): + if glyphName not in name2ucs: + uc = unicodeForDefaultGlyphName(glyphName) + if uc is not None: + aglName = agl.get(uc) + if aglName is not None: + if aglName in diacriticComps: + raise Exception('composing same glyph with different names:', aglName, glyphName) + print('rename', glyphName, '->', aglName, '(U+%04X)' % uc) + del diacriticComps[glyphName] + diacriticComps[aglName] = comp + + lines = [] + for glyphName, comp in diacriticComps.iteritems(): + lines.append(fmtGlyphComposition(glyphName, *comp)) + # print('\n'.join(lines)) + print('Write', diacriticsFilename) + if not dryRun: + with open(diacriticsFilename, 'w') as f: + for line in lines: + f.write(line + '\n') + + + + +main() diff --git a/misc/fixup-features.py b/misc/fixup-features.py new file mode 100755 index 000000000..1c2c0d087 --- /dev/null +++ b/misc/fixup-features.py @@ -0,0 +1,324 @@ +#!/usr/bin/env python +# encoding: utf8 +from __future__ import print_function +import os, sys, plistlib, re +from collections import OrderedDict +from ConfigParser import RawConfigParser +from argparse import ArgumentParser +from robofab.objects.objectsRF import OpenFont +from fontTools.feaLib.parser import Parser as FeaParser +from fontTools.feaLib.builder import Builder as FeaBuilder +from fontTools.ttLib import TTFont + + +# Regex matching "default" glyph names, like "uni2043" and "u01C5" +uniNameRe = re.compile(r'^u(?:ni)([0-9A-F]{4,8})$') + + +def unicodeForDefaultGlyphName(glyphName): + m = uniNameRe.match(glyphName) + if m is not None: + try: + return int(m.group(1), 16) + except: + pass + return None + + +def canonicalGlyphName(glyphName, uc2names): + uc = unicodeForDefaultGlyphName(glyphName) + if uc is not None: + names = uc2names.get(uc) + if names is not None and len(names) > 0: + return names[0] + return 
def parseGlyphComposition(composite):
  """Decode one diacritic-composition recipe.

  Input looks like "base+accent:pos[+accent:pos...]=glyphName[/dx,dy]";
  the result is (glyphName, baseName, [[accent, pos], ...], [dx, dy]),
  with [0, 0] as the offset when none is given.
  """
  pieces = composite.split("=")
  target = pieces[1].split("/")
  glyphName = target[0]
  offset = [0, 0] if len(target) == 1 else [int(n) for n in target[1].split(",")]
  accentSpecs = pieces[0].split("+")
  baseName = accentSpecs[0]
  accentNames = [spec.split(":") for spec in accentSpecs[1:]]
  return (glyphName, baseName, accentNames, offset)
+ + for font in fonts: + _uc2names = font.getCharacterMapping() # { 2126: ['Omega', ...], ...} + if uc2names is None: + uc2names = _uc2names + else: + for uc, _names in _uc2names.iteritems(): + names = uc2names.setdefault(uc, []) + for name in _names: + if name not in names: + names.append(name) + for g in font: + allNames.add(g.name) + + # agl { 2126: 'Omega', ...} -> { 'Omega': [2126, ...], ...} + aglName2Ucs = {} + for uc, name in agl.iteritems(): + aglName2Ucs.setdefault(name, []).append(uc) + + for glyphName, comp in diacriticComps.iteritems(): + aglUCs = aglName2Ucs.get(glyphName) + if aglUCs is None: + uc = unicodeForDefaultGlyphName(glyphName) + if uc is not None: + glyphName2 = agl.get(uc) + if glyphName2 is not None: + glyphName = glyphName2 + names = uc2names.setdefault(uc, []) + if glyphName not in names: + names.append(glyphName) + allNames.add(glyphName) + else: + allNames.add(glyphName) + for uc in aglUCs: + names = uc2names.get(uc, []) + if glyphName not in names: + names.append(glyphName) + uc2names[uc] = names + + name2ucs = {} # { 'Omega': [2126, ...], ...} + for uc, names in uc2names.iteritems(): + for name in names: + name2ucs.setdefault(name, set()).add(uc) + + return uc2names, name2ucs, allNames + + +def main(): + argparser = ArgumentParser(description='Fixup features.fea') + + argparser.add_argument( + '-dry', dest='dryRun', action='store_const', const=True, default=False, + help='Do not modify anything, but instead just print what would happen.') + + argparser.add_argument( + 'fontPaths', metavar='', type=str, nargs='+', help='UFO fonts to update') + + args = argparser.parse_args() + dryRun = args.dryRun + + agl = loadAGL('src/glyphlist.txt') # { 2126: 'Omega', ... 
} + diacriticComps = loadGlyphCompositions('src/diacritics.txt') # {glyphName => (baseName, a, o)} + + # collect glyph names + fonts = [OpenFont(fontPath) for fontPath in args.fontPaths] + uc2names, name2ucs, allNames = loadLocalNamesDB(fonts, agl, diacriticComps) + + # open feature.fea + featuresFilename = '' + featuresLines = [] + for fontPath in args.fontPaths: + try: + featuresFilename = os.path.join(fontPath, 'features.fea') + with open(featuresFilename, 'r') as f: + print('read', featuresFilename) + featuresLines = f.read().splitlines() + break + except: + pass + + classDefRe = re.compile(r'^@([^\s=]+)\s*=\s*\[([^\]]+)\]\s*;\s*$') + subRe = re.compile(r'^\s*sub\s+(.+)(\'?)\s+by\s+(.+)\s*;\s*$') + sub2Re = re.compile(r'^\s*sub\s+([^\[]+)\s+\[\s*([^\]]+)\s*\](\'?)\s+by\s+(.+)\s*;\s*$') + # sub lmidtilde [uni1ABB uni1ABD uni1ABE]' by uni1ABE.w2; + # sub lmidtilde uni1ABC' by uni1ABC.w2; + spacesRe = re.compile(r'[\s\r\n]+') + + classDefs = {} + featuresLines2 = [] + + for line in featuresLines: + clsM = classDefRe.match(line) + if clsM is not None: + clsName = clsM.group(1) + names = spacesRe.split(clsM.group(2).strip()) + if clsName in classDefs: + raise Exception('duplicate class definition ' + clsName) + # print('classdef', clsName, ' '.join(names)) + # print('classdef', clsName) + names2 = [] + for name in names: + if name == '-': + # e.g. 
A - Z + names2.append(name) + continue + if name[0] != '@': + canonName = canonicalGlyphName(name, uc2names) + if canonName != name: + # print('renaming ' + name + ' -> ' + canonName) + names2.append(canonName) + elif name not in allNames: + print('skipping unknown glyph ' + name) + else: + names2.append(name) + else: + raise Exception('todo: class-ref ' + name + ' in class-def ' + clsName) + classDefs[clsName] = names2 + line = '@%s = [ %s ];' % (clsName, ' '.join(names2)) + featuresLines2.append(line) + continue + + + # sub2M = sub2Re.match(line) + # if sub2M is not None: + # findNames1 = spacesRe.split(sub2M.group(1)) + # findNames2 = spacesRe.split(sub2M.group(2)) + # apos = sub2M.group(3) + # rightName = sub2M.group(4) + # print('TODO: sub2', findNames1, findNames2, apos, rightName) + # featuresLines2.append(line) + # continue + + + sub2M = sub2Re.match(line) + subM = None + if sub2M is None: + subM = subRe.match(line) + if subM is not None or sub2M is not None: + findNamesStr = '' + findNamesHasBrackets = False + findNames = [] + + findNamesBStr = '' + findNamesBHasBrackets = False + findNamesB = [] + + newNamesStr = '' + newNamesHasBrackets = False + newNames = [] + + apos0 = '' + + if subM is not None: + findNamesStr = subM.group(1) + apos0 = subM.group(2) + newNamesStr = subM.group(3) + else: # sub2M + findNamesStr = sub2M.group(1) + findNamesBStr = sub2M.group(2) + apos0 = sub2M.group(3) + newNamesStr = sub2M.group(4) + + if newNamesStr[0] == '[': + newNamesHasBrackets = True + newNamesStr = newNamesStr.strip('[ ]') + newNames = spacesRe.split(newNamesStr) + + if findNamesStr[0] == '[': + findNamesHasBrackets = True + findNamesStr = findNamesStr.strip('[ ]') + findNames = spacesRe.split(findNamesStr) + + if findNamesBStr != '': + if findNamesBStr[0] == '[': + findNamesBHasBrackets = True + findNamesBStr = findNamesBStr.strip('[ ]') + findNamesB = spacesRe.split(findNamesBStr) + + + names22 = [] + for names in [findNames, findNamesB, newNames]: + names2 = 
[] + for name in names: + if name[0] == '@': + clsName = name[1:].rstrip("'") + if clsName not in classDefs: + raise Exception('sub: missing target class ' + clsName + ' at\n' + line) + names2.append(name) + else: + apos = name[-1] == "'" + if apos: + name = name[:-1] + if name not in allNames: + canonName = canonicalGlyphName(name, uc2names) + if canonName != name: + print('renaming ' + name + ' -> ' + canonName) + name = canonName + else: + raise Exception('TODO: unknown name', name) + # if we remove names, we also need to remove subs (that become empty), and so on. + if apos: + name += "'" + names2.append(name) + names22.append(names2) + + findNames2, findNamesB2, newNames2 = names22 + + findNamesStr = ' '.join(findNames2) + if findNamesHasBrackets: findNamesStr = '[' + findNamesStr + ']' + + if findNamesBStr != '': + findNamesBStr = ' '.join(findNamesB2) + if findNamesBHasBrackets: findNamesBStr = '[' + findNamesBStr + ']' + + newNamesStr = ' '.join(newNames2) + if newNamesHasBrackets: newNamesStr = '[' + newNamesStr + ']' + + if subM is not None: + line = ' sub %s%s by %s;' % (findNamesStr, apos0, newNamesStr) + else: + # if subM is None: + # sub bbar [uni1ABB uni1ABD uni1ABE]' by uni1ABE.w2; + line = ' sub %s [%s]%s by %s;' % (findNamesStr, findNamesBStr, apos0, newNamesStr) + + featuresLines2.append(line) + + + print('Write', featuresFilename) + if not dryRun: + with open(featuresFilename + '2', 'w') as f: + for line in featuresLines2: + f.write(line + '\n') + + # FeaParser(featuresFilename + '2', allNames).parse() + + # font = TTFont('build/dist-unhinted/Interface-Regular.otf') + # FeaBuilder(font, featuresFilename + '2').build() + + + + + +main() diff --git a/misc/fixup-kerning.py b/misc/fixup-kerning.py new file mode 100755 index 000000000..fc4ce8071 --- /dev/null +++ b/misc/fixup-kerning.py @@ -0,0 +1,362 @@ +#!/usr/bin/env python +# encoding: utf8 +from __future__ import print_function +import os, sys, plistlib, json +from collections import OrderedDict 
def revCharMap(ucToNames):
  """Invert a unicode -> glyphname(s) map.

  Accepts either {2126: ['Omega', 'Omegagr']} or {2126: 'Omega'} and
  returns {'Omega': 2126, 'Omegagr': 2126, ...}. If several codepoints
  map to the same name, the last one visited wins (unchanged behavior).

  Fix: iterate with .values()/.items() instead of the Python 2-only
  itervalues()/iteritems(), so the helper also runs under Python 3 and
  matches the .items() iteration used elsewhere in this script.
  """
  m = {}
  if len(ucToNames) == 0:
    return m

  # Sniff the value shape from one arbitrary entry: a plain string means
  # a 1:1 map, anything else (list/tuple) means a 1:many map.
  # NOTE(review): on Python 2 a unicode value would be misclassified as a
  # list here -- same as the original isinstance(v, str) check.
  firstValue = next(iter(ucToNames.values()))
  lists = not isinstance(firstValue, str)

  if lists:
    for uc, names in ucToNames.items():
      for name in names:
        m[name] = uc
  else:
    for uc, name in ucToNames.items():
      m[name] = uc

  return m
def fixupGroups(fontPath, dstGlyphNames, srcToDstMap, dryRun, stats):
  """Rewrite <fontPath>/groups.plist after a glyph-rename pass.

  Glyphs are renamed via srcToDstMap; glyphs absent from dstGlyphNames
  are dropped, and groups that end up empty are dropped entirely.
  Removals and renames are recorded on `stats`.

  Returns (groups2, glyphToGroups): the cleaned
  {groupName: [glyphName, ...]} mapping and its reverse
  {glyphName: [groupName, ...]} index. The file is only rewritten when
  dryRun is false.

  NOTE(review): plistlib.readPlist/writePlist is the legacy API removed
  in Python 3.9; migrating this script off Python 2 requires
  plistlib.load/dump with open file objects instead.
  """
  filename = os.path.join(fontPath, 'groups.plist')
  groups = plistlib.readPlist(filename)
  groups2 = {}
  glyphToGroups = {}

  # .items() instead of the Python 2-only .iteritems(), consistent with
  # the .items() iteration used in fixupKerning below.
  for groupName, glyphNames in groups.items():
    glyphNames2 = []
    for glyphName in glyphNames:
      if glyphName in srcToDstMap:
        gn2 = srcToDstMap[glyphName]
        stats.renamedGlyphs[glyphName] = gn2
        glyphName = gn2
      if glyphName in dstGlyphNames:
        glyphNames2.append(glyphName)
        # setdefault().append() avoids rebuilding the list on every hit.
        glyphToGroups.setdefault(glyphName, []).append(groupName)
      else:
        stats.removedGlyphs.add(glyphName)
    if len(glyphNames2) > 0:
      groups2[groupName] = glyphNames2
    else:
      stats.removedGroups.add(groupName)

  print('Writing', filename)
  if not dryRun:
    plistlib.writePlist(groups2, filename)

  return groups2, glyphToGroups
+ right2[rightName] = kerningValue + rightGroupNamesAndValues.append((groups[rightName], rightName, kerningValue)) + else: + stats.removedGroups.add(rightName) + else: + if rightName in srcToDstMap: + rightName2 = srcToDstMap[rightName] + stats.renamedGlyphs[rightName] = rightName2 + rightName = rightName2 + if rightName in dstGlyphNames: + right2[rightName] = kerningValue + if leftIsGroup: + rightGroupNamesAndValues.append(([rightName], '', kerningValue)) + else: + stats.removedGlyphs.add(rightName) + + if len(right2): + kerning2[leftName] = right2 + + # update groupPairs + lgroupname = leftName if rightIsGroup else '' + if leftIsGroup: + for lname in leftGroupNames: + kPrefix = lname + '+' + for rnames, rgroupname, kernv in rightGroupNamesAndValues: + for rname in rnames: + k = kPrefix + rname + v = (lgroupname, rgroupname, kernv) + if k in groupPairs: + raise Exception('duplicate group pair %s: %r and %r' % (k, groupPairs[k], v)) + groupPairs[k] = v + + elif leftIsGroup: + stats.removedGroups.add(leftName) + else: + stats.removedGlyphs.add(leftName) + + # print('groupPairs:', groupPairs) + + # remove individual pairs that are already represented through groups + kerning = kerning2 + kerning2 = {} + for leftName, right in kerning.items(): + leftIsGroup = leftName[0] == '@' + # leftNames = groups[leftName] if leftIsGroup else [leftName] + + if not leftIsGroup: + right2 = {} + for rightName, kernVal in right.iteritems(): + rightIsGroup = rightName[0] == '@' + if not rightIsGroup: + k = leftName + '+' + rightName + if k in groupPairs: + groupPair = groupPairs[k] + print(('simplify individual pair %r: kern %r (individual) -> %r (group)') % ( + k, kernVal, groupPair[2])) + stats.simplifiedKerningPairs.add(k) + else: + right2[rightName] = kernVal + else: + right2[rightName] = kernVal + else: + # TODO, probably + right2 = right + + kerning2[leftName] = right2 + + print('Writing', filename) + if not dryRun: + plistlib.writePlist(kerning2, filename) + + return kerning2 + 
def loadJSONCharMap(filename):
  """Load a {unicode: glyphName} map from a JSON file ('-' = stdin).

  JSON object keys are always decoded as strings, so the declared schema
  {[unicode:int]: glyphname:string} arrives as {"2126": "Omega", ...};
  keys are validated and converted to int here.

  Bug fix: the previous version required the parsed keys to already be
  numbers, which json.load never produces, so every non-empty map was
  rejected with 'json dict key is not a number'. It also only validated
  the first entry; all entries are checked now.

  Raises Exception for a non-object root, a non-numeric key, or a
  non-string value.
  """
  if filename == '-':
    m = json.load(sys.stdin)
  else:
    with open(filename, 'r') as f:
      m = json.load(f)
  if not isinstance(m, dict):
    raise Exception('json root is not a dict')
  charMap = {}
  for k, v in m.items():
    try:
      uc = int(k)
    except (TypeError, ValueError):
      raise Exception('json dict key is not a number')
    if not isinstance(v, str):
      raise Exception('json dict value is not a string')
    charMap[uc] = v
  return charMap
{2126: "Omega"})') + + argparser.add_argument( + '-src-font', dest='srcFontFile', metavar='', type=str, + help='TrueType or OpenType font to read glyph names from.') + + argparser.add_argument( + 'dstFontsPaths', metavar='', type=str, nargs='+', help='UFO fonts to update') + + args = argparser.parse_args() + dryRun = args.dryRun + + if args.srcJSONFile and args.srcFontFile: + argparser.error('Both -src-json and -src-font specified -- please provide only one.') + + # Strip trailing slashes from font paths + args.dstFontsPaths = [s.rstrip('/ ') for s in args.dstFontsPaths] + + # Load source char map + srcCharMap = None + if args.srcJSONFile: + try: + srcCharMap = loadJSONCharMap(args.srcJSONFile) + except Exception as err: + argparser.error('Invalid JSON: Expected schema %s (%s)' % (jsonSchemaDescr, err)) + elif args.srcFontFile: + srcCharMap = getTTCharMap(args.srcFontFile.rstrip('/ ')) # -> { 2126: 'Omegagreek', ...} + else: + argparser.error('No source provided (-src-* argument missing)') + if len(srcCharMap) == 0: + print('Empty character map', file=sys.stderr) + sys.exit(1) + + # Find project source dir + srcDir = '' + for dstFontPath in args.dstFontsPaths: + s = os.path.dirname(dstFontPath) + if not srcDir: + srcDir = s + elif srcDir != s: + raise Exception('All s must be rooted in the same directory') + + # Load font project config + # load fontbuild configuration + config = RawConfigParser(dict_type=OrderedDict) + configFilename = os.path.join(srcDir, 'fontbuild.cfg') + config.read(configFilename) + diacriticsFile = configFindResFile(config, srcDir, 'diacriticfile') + + for dstFontPath in args.dstFontsPaths: + dstFont = OpenFont(dstFontPath) + dstCharMap = dstFont.getCharacterMapping() # -> { 2126: [ 'Omega', ...], ...} + dstRevCharMap = revCharMap(dstCharMap) # { 'Omega': 2126, ...} + srcToDstMap = getGlyphNameDifferenceMap(srcCharMap, dstCharMap, dstRevCharMap) + + stats = Stats() + + groups, glyphToGroups = fixupGroups(dstFontPath, dstRevCharMap, 
def num(s):
  """Parse a numeric string: int when possible, float otherwise.

  Handles plain integers ('42'), decimals ('3.5') and -- unlike the
  previous '.'-sniffing version, which crashed on them -- values that
  int() rejects but float() accepts, such as '1e3' or 'inf'.
  """
  try:
    return int(s)
  except ValueError:
    return float(s)
OUTPUT_TYPE_COMPLETE = 'complete'
OUTPUT_TYPE_GLYPHLIST = 'glyphlist'


GLYPHS_TYPE_UNKNOWN = '?'
GLYPHS_TYPE_TT = 'tt'
GLYPHS_TYPE_CFF = 'cff'

def getGlyphsType(tt):
  """Classify a font by its glyph-data table: 'cff', 'tt' or '?'."""
  if 'CFF ' in tt:
    return GLYPHS_TYPE_CFF
  elif 'glyf' in tt:
    return GLYPHS_TYPE_TT
  return GLYPHS_TYPE_UNKNOWN


class GlyphInfo:
  """Collected metrics for a single glyph.

  Built from a glyphset entry `g` plus, for TrueType fonts, the raw
  'glyf' table entry (contour count, bbox, hinting program presence).
  """
  def __init__(self, g, name, unicodes, type, glyphTable):
    self._type = type  # one of the GLYPHS_TYPE_* constants
    self._glyphTable = glyphTable

    self.name = name
    self.width = g.width
    self.lsb = g.lsb
    self.unicodes = unicodes

    if g.height is not None:
      self.tsb = g.tsb
      self.height = g.height
    else:
      self.tsb = 0
      self.height = 0

    self.numContours = 0
    self.contoursBBox = (0,0,0,0) # xMin, yMin, xMax, yMax
    self.hasHints = False

    # Compare with == rather than `is`: equal strings being the same
    # object is a CPython interning accident, not a guarantee.
    if self._type == GLYPHS_TYPE_CFF:
      self._addCFFInfo()
    elif self._type == GLYPHS_TYPE_TT:
      self._addTTInfo()

  def _addTTInfo(self):
    g = self._glyphTable[self.name]
    self.numContours = g.numberOfContours
    if g.numberOfContours:
      # Bug fix: the second element used to be g.xMin again; the bbox is
      # (xMin, yMin, xMax, yMax).
      self.contoursBBox = (g.xMin, g.yMin, g.xMax, g.yMax)
    self.hasHints = hasattr(g, "program")

  def _addCFFInfo(self):
    # TODO: parse CFF dict tree
    pass

  @classmethod
  def structKeys(cls, type):
    """Column names matching the order of structValues()."""
    v = [
      'name',
      'unicodes',
      'width',
      'lsb',
      'height',
      'tsb',
      'hasHints',
    ]
    if type == GLYPHS_TYPE_TT:
      v += (
        'numContours',
        'contoursBBox',
      )
    return v

  def structValues(self):
    """Row of values in structKeys() order."""
    v = [
      self.name,
      self.unicodes,
      self.width,
      self.lsb,
      self.height,
      self.tsb,
      self.hasHints,
    ]
    if self._type == GLYPHS_TYPE_TT:
      v += (
        self.numContours,
        self.contoursBBox,
      )
    return v
def genGlyphsInfo(tt, outputType, glyphsType=GLYPHS_TYPE_UNKNOWN, glyphsTable=None, withGlyphs=None):
  """Collect per-glyph information from a fontTools TTFont.

  With outputType == OUTPUT_TYPE_GLYPHLIST, returns
  [[name, codepoint, ...], ...]; otherwise returns
  {'keys': [...], 'values': [...]} built from GlyphInfo rows.
  withGlyphs, when a string, is a comma-separated whitelist of glyph
  names; otherwise the font's full glyph order is used.
  """
  unicodeMap = {}  # { glyphname: [codepoint, ...] }

  glyphnameFilter = None
  if isinstance(withGlyphs, str):
    glyphnameFilter = withGlyphs.split(',')

  if 'cmap' in tt:
    # https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6cmap.html
    # Prefer the highest-format Unicode (platformID 0) subtable.
    bestCodeSubTable = None
    bestCodeSubTableFormat = 0
    for st in tt['cmap'].tables:
      if st.platformID == 0: # 0=unicode, 1=mac, 2=(reserved), 3=microsoft
        if st.format > bestCodeSubTableFormat:
          bestCodeSubTable = st
          bestCodeSubTableFormat = st.format
    # Bug fix: a cmap with no Unicode subtable left bestCodeSubTable as
    # None and crashed below with AttributeError; skip the scan instead.
    if bestCodeSubTable is not None:
      for cp, glyphname in bestCodeSubTable.cmap.items():
        if glyphname in unicodeMap:
          unicodeMap[glyphname].append(cp)
        else:
          unicodeMap[glyphname] = [cp]

  glyphValues = []
  # == rather than `is` -- string identity is an interning accident.
  glyphset = tt.getGlyphSet(preferCFF=glyphsType == GLYPHS_TYPE_CFF)

  glyphnames = tt.getGlyphOrder() if glyphnameFilter is None else glyphnameFilter

  if outputType == OUTPUT_TYPE_GLYPHLIST:
    glyphValues = []
    for glyphname in glyphnames:
      v = [glyphname]
      if glyphname in unicodeMap:
        v += unicodeMap[glyphname]
      glyphValues.append(v)
    return glyphValues

  for glyphname in glyphnames:
    unicodes = unicodeMap[glyphname] if glyphname in unicodeMap else []
    try:
      g = glyphset[glyphname]
    except KeyError:
      raise Exception('no such glyph "'+glyphname+'"')
    gi = GlyphInfo(g, glyphname, unicodes, glyphsType, glyphsTable)
    glyphValues.append(gi.structValues())

  return {
    'keys': GlyphInfo.structKeys(glyphsType),
    'values': glyphValues,
  }
genFontInfo(fontpath, outputType, withGlyphs=True): + tt = ttLib.TTFont(fontpath) # lazy=True + info = { + 'id': fontpath, + } + + # for tableName in tt.keys(): + # print 'table', tableName + + nameDict = {} + if 'name' in tt: + nameDict = {} + for rec in tt['name'].names: + k = _NAME_IDS[rec.nameID] if rec.nameID in _NAME_IDS else ('#%d' % rec.nameID) + nameDict[k] = rec.toUnicode() + if 'fontId' in nameDict: + info['id'] = nameDict['fontId'] + + if 'postscriptName' in nameDict: + info['name'] = nameDict['postscriptName'] + elif 'familyName' in nameDict: + info['name'] = nameDict['familyName'].replace(' ', '') + if 'subfamilyName' in nameDict: + info['name'] += '-' + nameDict['subfamilyName'].replace(' ', '') + + if outputType is not OUTPUT_TYPE_GLYPHLIST: + if len(nameDict): + info['names'] = nameDict + + if 'head' in tt: + info['head'] = sstructTableToDict(tt['head'], headFormat) + + if 'hhea' in tt: + info['hhea'] = sstructTableToDict(tt['hhea'], hheaFormat) + + if 'post' in tt: + info['post'] = sstructTableToDict(tt['post'], postFormat) + + if 'OS/2' in tt: + t = tt['OS/2'] + if t.version == 1: + info['os/2'] = sstructTableToDict(t, OS2_format_1) + elif t.version in (2, 3, 4): + info['os/2'] = sstructTableToDict(t, OS2_format_2) + elif t.version == 5: + info['os/2'] = sstructTableToDict(t, OS2_format_5) + info['os/2']['usLowerOpticalPointSize'] /= 20 + info['os/2']['usUpperOpticalPointSize'] /= 20 + if 'panose' in info['os/2']: + del info['os/2']['panose'] + + # if 'maxp' in tt: + # table = tt['maxp'] + # _, names, _ = sstruct.getformat(maxpFormat_0_5) + # if table.tableVersion != 0x00005000: + # _, names_1_0, _ = sstruct.getformat(maxpFormat_1_0_add) + # names += names_1_0 + # info['maxp'] = tableNamesToDict(table, names) + + glyphsType = getGlyphsType(tt) + glyphsTable = None + if glyphsType is GLYPHS_TYPE_CFF: + cff = tt["CFF "].cff + cffDictIndex = cff.topDictIndex + if len(cffDictIndex) > 1: + sys.stderr.write( + 'warning: multi-font CFF table is 
unsupported. Only reporting first table.\n' + ) + cffTable = cffDictIndex[0] + if outputType is not OUTPUT_TYPE_GLYPHLIST: + addCFFFontInfo(tt, info, cffTable) + elif glyphsType is GLYPHS_TYPE_TT: + glyphsTable = tt["glyf"] + # print 'glyphs type:', glyphsType, 'flavor:', tt.flavor, 'sfntVersion:', tt.sfntVersion + + if (withGlyphs is not False or outputType is OUTPUT_TYPE_GLYPHLIST) and withGlyphs is not '': + info['glyphs'] = genGlyphsInfo(tt, outputType, glyphsType, glyphsTable, withGlyphs) + + # sys.exit(1) + + return info + + +# ———————————————————————————————————————————————————————————————————————— +# main + +def main(): + argparser = argparse.ArgumentParser(description='Generate JSON describing fonts') + + argparser.add_argument('-out', dest='outfile', metavar='', type=str, + help='Write JSON to . Writes to stdout if not specified') + + argparser.add_argument('-pretty', dest='prettyJson', action='store_const', + const=True, default=False, + help='Generate pretty JSON with linebreaks and indentation') + + argparser.add_argument('-with-all-glyphs', dest='withGlyphs', action='store_const', + const=True, default=False, + help='Include glyph information on all glyphs.') + + argparser.add_argument('-with-glyphs', dest='withGlyphs', metavar='glyphname[,glyphname ...]', + type=str, + help='Include glyph information on specific glyphs') + + argparser.add_argument('-as-glyphlist', dest='asGlyphList', + action='store_const', const=True, default=False, + help='Only generate a list of glyphs and their unicode mappings.') + + argparser.add_argument('fontpaths', metavar='', type=str, nargs='+', + help='TrueType or OpenType font files') + + args = argparser.parse_args() + + fonts = {} + outputType = OUTPUT_TYPE_COMPLETE + if args.asGlyphList: + outputType = OUTPUT_TYPE_GLYPHLIST + + n = 0 + for fontpath in args.fontpaths: + if n > 0: + # workaround for a bug in fontTools.misc.sstruct where it keeps a global + # internal cache that mixes up values for different fonts. 
+ reload(sstruct) + font = genFontInfo(fontpath, outputType=outputType, withGlyphs=args.withGlyphs) + fonts[font['id']] = font + n += 1 + + ostream = sys.stdout + if args.outfile is not None: + ostream = open(args.outfile, 'w') + + + if args.prettyJson: + json.dump(fonts, ostream, sort_keys=True, indent=2, separators=(',', ': ')) + else: + json.dump(fonts, ostream, separators=(',', ':')) + + + if ostream is not sys.stdout: + ostream.close() + + + +# "name" table name identifiers +_NAME_IDS = { + # TrueType & OpenType + 0: 'copyright', + 1: 'familyName', + 2: 'subfamilyName', + 3: 'fontId', + 4: 'fullName', + 5: 'version', # e.g. 'Version .' + 6: 'postscriptName', + 7: 'trademark', + 8: 'manufacturerName', + 9: 'designer', + 10: 'description', + 11: 'vendorURL', + 12: 'designerURL', + 13: 'licenseDescription', + 14: 'licenseURL', + 15: 'RESERVED', + 16: 'typoFamilyName', + 17: 'typoSubfamilyName', + 18: 'macCompatibleFullName', # Mac only (FOND) + 19: 'sampleText', + + # OpenType + 20: 'postScriptCIDName', + 21: 'wwsFamilyName', + 22: 'wwsSubfamilyName', + 23: 'lightBackgoundPalette', + 24: 'darkBackgoundPalette', + 25: 'variationsPostScriptNamePrefix', + + # 26-255: Reserved for future expansion + # 256-32767: Font-specific names (layout features and settings, variations, track names, etc.) 
# Regex matching "default" glyph names, like "uni2043" and "u01C5".
# Bug fix: the "ni" part must be optional -- the previous pattern
# r'^u(?:ni)([0-9A-F]{4,8})$' could never match the documented "u01C5" form.
uniNameRe = re.compile(r'^u(?:ni)?([0-9A-F]{4,8})$')


def unicodeForDefaultGlyphName(glyphName):
  """Return the Unicode codepoint encoded in a default glyph name.

  E.g. 'uni2043' -> 0x2043 and 'u01C5' -> 0x01C5. Returns None for any
  name that does not follow the uniXXXX/uXXXX convention.
  """
  m = uniNameRe.match(glyphName)
  if m is not None:
    try:
      return int(m.group(1), 16)
    except ValueError:
      # The regex only admits hex digits, so this is effectively
      # unreachable; keep the guard narrow instead of a bare except.
      pass
  return None
def rgbaToCSSColor(r=0, g=0, b=0, a=1):
  """Convert 0..1 RGBA components to a CSS color string.

  Fully opaque colors (a == 1) become '#rrggbb'; anything else becomes
  'rgba(r,g,b,a)'. Channel values are scaled to 0..255 and truncated.
  """
  channels = [int(c * 255) for c in (r, g, b)]
  if a == 1:
    return '#%02x%02x%02x' % tuple(channels)
  return 'rgba(%d,%d,%d,%f)' % (channels[0], channels[1], channels[2], a)
cp.categoryName + ']' + elif len(cp.name): + return cp.name + return None + + +def main(): + argparser = ArgumentParser( + description='Generate info on name, unicodes and color mark for all glyphs') + + argparser.add_argument( + '-ucd', dest='ucdFile', metavar='', type=str, + help='UnicodeData.txt file from http://www.unicode.org/') + + argparser.add_argument( + 'fontPaths', metavar='', type=str, nargs='+', help='UFO fonts to update') + + args = argparser.parse_args() + markLibKey = 'com.typemytype.robofont.mark' + + fontPaths = [] + for fontPath in args.fontPaths: + fontPath = fontPath.rstrip('/ ') + if 'regular' or 'Regular' in fontPath: + fontPaths = [fontPath] + fontPaths + else: + fontPaths.append(fontPath) + + fonts = [OpenFont(fontPath) for fontPath in args.fontPaths] + + agl = loadAGL('src/glyphlist.txt') # { 2126: 'Omega', ... } + diacriticComps = loadGlyphCompositions('src/diacritics.txt') + uc2names, name2ucs, allNames = loadLocalNamesDB(fonts, agl, diacriticComps) + + ucd = {} + if args.ucdFile: + ucd = parseUnicodeDataFile(args.ucdFile) + + glyphorder = OrderedDict() + with open(os.path.join(os.path.dirname(args.fontPaths[0]), 'glyphorder.txt'), 'r') as f: + for name in f.read().splitlines(): + if len(name) and name[0] != '#': + glyphorder[name] = True + + for name in diacriticComps.iterkeys(): + glyphorder[name] = True + + glyphNames = glyphorder.keys() + visitedGlyphNames = set() + glyphs = [] + + for font in fonts: + for name, v in glyphorder.iteritems(): + if name in visitedGlyphNames: + continue + + g = None + ucs = [] + try: + g = font[name] + ucs = g.unicodes + except: + ucs = name2ucs.get(name) + if ucs is None: + continue + + color = None + if g is not None and markLibKey in g.lib: + # TODO: translate from (r,g,b,a) to #RRGGBB (skip A) + rgba = g.lib[markLibKey] + if isinstance(rgba, list) or isinstance(rgba, tuple): + color = rgbaToCSSColor(*rgba) + elif name in diacriticComps: + color = '' + + # name[, unicode[, unicodeName[, color]]] + if 
len(ucs): + for uc in ucs: + ucName = unicodeName(ucd.get(uc)) + + if not ucName and uc >= 0xE000 and uc <= 0xF8FF: + ucName = '[private use %04X]' % uc + + if color: + glyph = [name, uc, ucName, color] + elif ucName: + glyph = [name, uc, ucName] + else: + glyph = [name, uc] + glyphs.append(glyph) + else: + glyph = [name, None, None, color] if color else [name] + glyphs.append(glyph) + + visitedGlyphNames.add(name) + + print('{"glyphs":[') + prefix = ' ' + for g in glyphs: + print(prefix + json.dumps(g)) + if prefix == ' ': + prefix = ', ' + print(']}') + + +if __name__ == '__main__': + main() diff --git a/misc/gen-glyphorder.py b/misc/gen-glyphorder.py new file mode 100755 index 000000000..0817e97b0 --- /dev/null +++ b/misc/gen-glyphorder.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +# encoding: utf8 +from __future__ import print_function +import os, plistlib +from collections import OrderedDict +from argparse import ArgumentParser + + +def parseGlyphComposition(composite): + c = composite.split("=") + d = c[1].split("/") + glyphName = d[0] + if len(d) == 1: + offset = [0, 0] + else: + offset = [int(i) for i in d[1].split(",")] + accentString = c[0] + accents = accentString.split("+") + baseName = accents.pop(0) + accentNames = [i.split(":") for i in accents] + return (glyphName, baseName, accentNames, offset) + + +def loadGlyphCompositions(filename): # { glyphName => (baseName, accentNames, offset) } + compositions = OrderedDict() + with open(filename, 'r') as f: + for line in f: + line = line.strip() + if len(line) > 0 and line[0] != '#': + glyphName, baseName, accentNames, offset = parseGlyphComposition(line) + compositions[glyphName] = (baseName, accentNames, offset) + return compositions + + +def main(): + argparser = ArgumentParser(description='Generate glyph order list from UFO files') + argparser.add_argument('fontPaths', metavar='', type=str, nargs='+', help='UFO files') + args = argparser.parse_args() + + glyphorderUnion = OrderedDict() + + fontPaths = [] 
+ for fontPath in args.fontPaths: + if 'regular' or 'Regular' in fontPath: + fontPaths = [fontPath] + fontPaths + else: + fontPaths.append(fontPath) + + for fontPath in fontPaths: + libPlist = plistlib.readPlist(os.path.join(fontPath, 'lib.plist')) + if 'public.glyphOrder' in libPlist: + for name in libPlist['public.glyphOrder']: + glyphorderUnion[name] = True + + # incorporate src/diacritics.txt + # diacriticComps = loadGlyphCompositions('src/diacritics.txt') + # for glyphName in diacriticComps.iterkeys(): + # glyphorderUnion[glyphName] = True + + glyphorderUnionNames = glyphorderUnion.keys() + print('\n'.join(glyphorderUnionNames)) + + +if __name__ == '__main__': + main() diff --git a/misc/gen-kern.py b/misc/gen-kern.py new file mode 100644 index 000000000..e5a4c4875 --- /dev/null +++ b/misc/gen-kern.py @@ -0,0 +1,37 @@ + +def parseFeaList(s): + v = [] + for e in s.split(' '): + if e.find('-') != -1: + (a,b) = e.split('-') + #print 'split: %s, %s' % (a,chr(ord(a)+1)) + i = ord(a) + end = ord(b)+1 + while i < end: + v.append(chr(i)) + i += 1 + else: + v.append(e) + return v + +UC_ROMAN = parseFeaList('A-Z AE AEacute Aacute Abreve Acircumflex Adieresis Agrave Alpha Alphatonos Amacron Aogonek Aogonek.NAV Aring Aringacute Atilde Beta Cacute Ccaron Ccedilla Ccircumflex Chi Dcaron Dcroat Delta Eacute Ebreve Ecaron Ecircumflex Edieresis Edotaccent Egrave Emacron Eng Eogonek Eogonek.NAV Epsilon Epsilontonos Eta Etatonos Eth Gamma Gbreve Gcircumflex Gcommaaccent Germandbls Hbar Hcircumflex IJ Iacute Ibreve Icircumflex Idieresis Igrave Imacron Iogonek Iota Iotadieresis Iotatonos Itilde Jcircumflex Kappa Kcommaaccent Lacute Lambda Lcaron Lcommaaccent Ldot Lslash Nacute Ncaron Ncommaaccent Ntilde Nu OE Oacute Obreve Ocircumflex Odieresis Ograve Ohungarumlaut Omacron Omega Omegatonos Omicron Omicrontonos Oogonek Oogonek.NAV Oslash Oslashacute Otilde Phi Pi Psi Racute Rcaron Rcommaaccent Rho Sacute Scaron Scedilla Scircumflex Sigma Tau Tbar Tcaron Theta Thorn Uacute Ubreve 
Ucircumflex Udieresis Ugrave Uhungarumlaut Umacron Uogonek Upsilon Upsilondieresis Upsilontonos Uring Utilde Wacute Wcircumflex Wdieresis Wgrave Xi Yacute Ycircumflex Ydieresis Ygrave Zacute Zcaron Zdotaccent Zeta ampersand uni010A uni0120 uni0162 uni0218 uni021A uni037F') +LC_ROMAN = parseFeaList('a-z ae aeacute aacute abreve acircumflex adieresis agrave alpha alphatonos amacron aogonek aogonek.NAV aring aringacute atilde beta cacute ccaron ccedilla ccircumflex chi dcaron dcroat delta eacute ebreve ecaron ecircumflex edieresis edotaccent egrave emacron eng eogonek eogonek.NAV epsilon epsilontonos eta etatonos eth gamma gbreve gcircumflex gcommaaccent germandbls hbar hcircumflex ij iacute ibreve icircumflex idieresis igrave imacron iogonek iota iotadieresis iotatonos itilde jcircumflex kappa kcommaaccent lacute lambda lcaron lcommaaccent ldot lslash nacute ncaron ncommaaccent ntilde nu oe oacute obreve ocircumflex odieresis ograve ohungarumlaut omacron omega omegatonos omicron omicrontonos oogonek oogonek.NAV oslash oslashacute otilde phi pi psi racute rcaron rcommaaccent rho sacute scaron scedilla scircumflex sigma tau tbar tcaron theta thorn uacute ubreve ucircumflex udieresis ugrave uhungarumlaut umacron uogonek upsilon upsilondieresis upsilontonos uring utilde wacute wcircumflex wdieresis wgrave xi yacute ycircumflex ydieresis ygrave zacute zcaron zdotaccent zeta ampersand uni010B uni0121 uni0163 uni0219 uni021B uni03F3') + +UC_AF = parseFeaList('A-F') +LC_AF = parseFeaList('a-f') + +LNUM = parseFeaList('zero one two three four five six seven eight nine') + +HEXNUM = LNUM + UC_AF + LC_AF +ALL = UC_ROMAN + LC_ROMAN + LNUM + +glyphs = HEXNUM +for g in glyphs: + print ' %s' % g + for g in glyphs: + print ' %s-256' % g + print ' ' + +# print ', '.join(LC_ROMAN) + + diff --git a/misc/gen-num-pairs.js b/misc/gen-num-pairs.js new file mode 100644 index 000000000..9dbb92090 --- /dev/null +++ b/misc/gen-num-pairs.js @@ -0,0 +1,10 @@ + +const chars = '0 1 2 3 4 5 6 7 8 9 
A B C D E F a b c d e f'.split(' ') + +for (let c1 of chars) { + let s = [] + for (let c2 of chars) { + s.push(c1 + c2) + } + console.log(s.join(' ')) +} diff --git a/misc/glyf-props.py b/misc/glyf-props.py new file mode 100755 index 000000000..8783a422d --- /dev/null +++ b/misc/glyf-props.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# encoding: utf8 +from __future__ import print_function +import os, sys +from argparse import ArgumentParser +from robofab.objects.objectsRF import OpenFont + + +dryRun = False + +def renameProps(font, renames): + for g in font: + for currname, newname in renames: + if currname in g.lib: + if newname in g.lib: + raise Exception('property %r already exist in glyph %r' % (newname, g)) + g.lib[newname] = g.lib[currname] + del g.lib[currname] + + +def main(): + argparser = ArgumentParser( + description='Operate on UFO glyf "lib" properties') + + argparser.add_argument( + '-dry', dest='dryRun', action='store_const', const=True, default=False, + help='Do not modify anything, but instead just print what would happen.') + + argparser.add_argument( + '-m', dest='renameProps', metavar='=[,...]', type=str, + help='Rename properties') + + argparser.add_argument( + 'fontPaths', metavar='', type=str, nargs='+', help='UFO fonts to update') + + args = argparser.parse_args() + dryRun = args.dryRun + + renames = [] + if args.renameProps: + renames = [tuple(s.split('=')) for s in args.renameProps.split(',')] + # TODO: verify data structure + print('renaming properties:') + for rename in renames: + print(' %r => %r' % rename) + + # Strip trailing slashes from font paths and iterate + for fontPath in [s.rstrip('/ ') for s in args.fontPaths]: + font = OpenFont(fontPath) + + if len(renames): + print('Renaming properties in %s' % fontPath) + renameProps(font, renames) + + if dryRun: + print('Saving changes to %s (dry run)' % fontPath) + if not dryRun: + print('Saving changes to %s' % fontPath) + font.save() + + +if __name__ == '__main__': + main() diff --git 
a/misc/mac-tmp-disk-mount.sh b/misc/mac-tmp-disk-mount.sh new file mode 100755 index 000000000..f752c54f6 --- /dev/null +++ b/misc/mac-tmp-disk-mount.sh @@ -0,0 +1,25 @@ +#!/bin/bash +set -e +cd "$(dirname "$0")/.." + +# Create if needed +if [[ ! -f build/tmp.sparseimage ]]; then + echo "Creating sparse disk image with case-sensitive file system build/tmp.sparseimage" + mkdir -p build + hdiutil create build/tmp.sparseimage \ + -size 1g \ + -type SPARSE \ + -fs JHFS+X \ + -volname tmp +fi + +# Mount if needed +if ! (diskutil info build/tmp >/dev/null); then + echo "Mounting sparse disk image with case-sensitive file system at build/tmp" + hdiutil attach build/tmp.sparseimage \ + -readwrite \ + -mountpoint "$(pwd)/build/tmp" \ + -nobrowse \ + -noautoopen \ + -noidmereveal +fi diff --git a/misc/mac-tmp-disk-unmount.sh b/misc/mac-tmp-disk-unmount.sh new file mode 100755 index 000000000..d0dcff925 --- /dev/null +++ b/misc/mac-tmp-disk-unmount.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -e +cd "$(dirname "$0")/.." + +diskutil unmount build/tmp diff --git a/misc/notify b/misc/notify new file mode 100755 index 000000000..ab10e4e8e --- /dev/null +++ b/misc/notify @@ -0,0 +1,41 @@ +#!/bin/bash +# +# Shows macOS desktop notifications when a command completes. +# Depending on exit status of the command, a different notification message is shown. +# +# Examples: +# misc/nofify make -j 8 >/dev/null +# Make all font styles in all formats without printing detailed messages +# +# misc/notify make Regular +# Make the regular style in all formats +# + +HAS_NOTIFIER=true +if ! (which terminal-notifier >/dev/null); then + HAS_NOTIFIER=false + echo "$0: terminal-notifier not found in PATH (will not notify)" >&2 + echo "$0: You can install through: brew install terminal-notifier" +fi + +CMDS="$@" +"$@" +STATUS=$? 
+ +if $HAS_NOTIFIER; then + if [[ $STATUS -eq 0 ]]; then + terminal-notifier \ + -title "$1 ✅" \ + -message "$CMDS" \ + -activate com.apple.Terminal \ + -timeout 8 >/dev/null & + else + terminal-notifier \ + -title "$1 failed ❌" \ + -message "$CMDS => $STATUS" \ + -activate com.apple.Terminal \ + -timeout 20 >/dev/null & + fi +fi + +exit $STATUS diff --git a/misc/pylib/fontbuild/Build.py b/misc/pylib/fontbuild/Build.py new file mode 100644 index 000000000..5046f9f91 --- /dev/null +++ b/misc/pylib/fontbuild/Build.py @@ -0,0 +1,300 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import ConfigParser +import os +import sys + +from booleanOperations import BooleanOperationManager +from cu2qu.ufo import fonts_to_quadratic +from fontTools.misc.transform import Transform +from robofab.world import OpenFont +from ufo2ft import compileOTF, compileTTF + +from fontbuild.decomposeGlyph import decomposeGlyph +from fontbuild.features import readFeatureFile, writeFeatureFile +from fontbuild.generateGlyph import generateGlyph +from fontbuild.instanceNames import setInfoRF +from fontbuild.italics import italicizeGlyph +from fontbuild.markFeature import RobotoFeatureCompiler, RobotoKernWriter +from fontbuild.mitreGlyph import mitreGlyph +from fontbuild.mix import Mix,Master,narrowFLGlyph + + +class FontProject: + + def __init__(self, basefont, basedir, configfile, buildTag=''): + self.basefont = basefont + self.basedir = basedir + self.config = ConfigParser.RawConfigParser() + self.configfile = os.path.join(self.basedir, configfile) + self.config.read(self.configfile) + self.buildTag = buildTag + + self.diacriticList = [ + line.strip() for line in self.openResource("diacriticfile") + if not line.startswith("#")] + self.adobeGlyphList = dict( + line.split(";") for line in self.openResource("agl_glyphlistfile") + if not line.startswith("#")) + self.glyphOrder = self.openResource("glyphorder") + + # map exceptional glyph names in Roboto to names in the AGL + roboNames = ( + ('Obar', 'Ocenteredtilde'), ('obar', 'obarred'), + ('eturn', 'eturned'), ('Iota1', 'Iotaafrican')) + for roboName, aglName in roboNames: + self.adobeGlyphList[roboName] = self.adobeGlyphList[aglName] + + self.builddir = "out" + self.decompose = self.config.get("glyphs","decompose").split() + self.predecompose = self.config.get("glyphs","predecompose").split() + self.lessItalic = self.config.get("glyphs","lessitalic").split() + self.deleteList = self.config.get("glyphs","delete").split() + self.noItalic = self.config.get("glyphs","noitalic").split() + + self.buildOTF = False + 
self.compatible = False + self.generatedFonts = [] + + def openResource(self, name): + with open(os.path.join( + self.basedir, self.config.get("res", name))) as resourceFile: + resource = resourceFile.read() + return resource.splitlines() + + def generateOutputPath(self, font, ext): + family = font.info.familyName.replace(" ", "") + style = font.info.styleName.replace(" ", "") + path = os.path.join(self.basedir, self.builddir, family + ext.upper()) + if not os.path.exists(path): + os.makedirs(path) + return os.path.join(path, "%s-%s.%s" % (family, style, ext)) + + def generateFont(self, mix, names, italic=False, swapSuffixes=None, stemWidth=185, + italicMeanYCenter=-825, italicNarrowAmount=1): + + n = names.split("/") + log("---------------------\n%s %s\n----------------------" %(n[0],n[1])) + log(">> Mixing masters") + if isinstance( mix, Mix): + f = mix.generateFont(self.basefont) + else: + f = mix.copy() + + if italic == True: + log(">> Italicizing") + i = 0 + for g in f: + i += 1 + if i % 10 == 0: print g.name + + if g.name == "uniFFFD": + continue + + decomposeGlyph(f, g) + removeGlyphOverlap(g) + + if g.name in self.lessItalic: + italicizeGlyph(f, g, 9, stemWidth=stemWidth, + meanYCenter=italicMeanYCenter, + narrowAmount=italicNarrowAmount) + elif g.name not in self.noItalic: + italicizeGlyph(f, g, 10, stemWidth=stemWidth, + meanYCenter=italicMeanYCenter, + narrowAmount=italicNarrowAmount) + if g.width != 0: + g.width += 10 + + # set the oblique flag in fsSelection + f.info.openTypeOS2Selection.append(9) + + if swapSuffixes != None: + for swap in swapSuffixes: + swapList = [g.name for g in f if g.name.endswith(swap)] + for gname in swapList: + print gname + swapContours(f, gname.replace(swap,""), gname) + for gname in self.predecompose: + if f.has_key(gname): + decomposeGlyph(f, f[gname]) + + log(">> Generating glyphs") + generateGlyphs(f, self.diacriticList, self.adobeGlyphList) + log(">> Copying features") + readFeatureFile(f, self.basefont.features.text) + 
log(">> Decomposing") + for g in f: + if len(g.components) > 0: + decomposeGlyph(f, g) + # for gname in self.decompose: + # if f.has_key(gname): + # decomposeGlyph(f, f[gname]) + + copyrightHolderName = '' + if self.config.has_option('main', 'copyrightHolderName'): + copyrightHolderName = self.config.get('main', 'copyrightHolderName') + + def getcfg(name, fallback=''): + if self.config.has_option('main', name): + return self.config.get('main', name) + else: + return fallback + + setInfoRF(f, n, { + 'foundry': getcfg('foundry'), + 'foundryURL': getcfg('foundryURL'), + 'designer': getcfg('designer'), + 'copyrightHolderName': getcfg('copyrightHolderName'), + 'build': self.buildTag, + 'version': getcfg('version'), + 'license': getcfg('license'), + 'licenseURL': getcfg('licenseURL'), + }) + + if not self.compatible: + cleanCurves(f) + deleteGlyphs(f, self.deleteList) + + log(">> Generating font files") + ufoName = self.generateOutputPath(f, "ufo") + f.save(ufoName) + self.generatedFonts.append(ufoName) + + if self.buildOTF: + log(">> Generating OTF file") + newFont = OpenFont(ufoName) + otfName = self.generateOutputPath(f, "otf") + saveOTF(newFont, otfName, self.glyphOrder) + + def generateTTFs(self): + """Build TTF for each font generated since last call to generateTTFs.""" + + fonts = [OpenFont(ufo) for ufo in self.generatedFonts] + self.generatedFonts = [] + + log(">> Converting curves to quadratic") + # using a slightly higher max error (e.g. 
0.0025 em), dots will have + # fewer control points and look noticeably different + max_err = 0.001 + if self.compatible: + fonts_to_quadratic(fonts, max_err_em=max_err, dump_stats=True, reverse_direction=True) + else: + for font in fonts: + fonts_to_quadratic([font], max_err_em=max_err, dump_stats=True, reverse_direction=True) + + log(">> Generating TTF files") + for font in fonts: + ttfName = self.generateOutputPath(font, "ttf") + log(os.path.basename(ttfName)) + saveOTF(font, ttfName, self.glyphOrder, truetype=True) + + +def transformGlyphMembers(g, m): + g.width = int(g.width * m.a) + g.Transform(m) + for a in g.anchors: + p = Point(a.p) + p.Transform(m) + a.p = p + for c in g.components: + # Assumes that components have also been individually transformed + p = Point(0,0) + d = Point(c.deltas[0]) + d.Transform(m) + p.Transform(m) + d1 = d - p + c.deltas[0].x = d1.x + c.deltas[0].y = d1.y + s = Point(c.scale) + s.Transform(m) + #c.scale = s + + +def swapContours(f,gName1,gName2): + try: + g1 = f[gName1] + g2 = f[gName2] + except KeyError: + log("swapGlyphs failed for %s %s" % (gName1, gName2)) + return + g3 = g1.copy() + + while g1.contours: + g1.removeContour(0) + for contour in g2.contours: + g1.appendContour(contour) + g1.width = g2.width + + while g2.contours: + g2.removeContour(0) + for contour in g3.contours: + g2.appendContour(contour) + g2.width = g3.width + + +def log(msg): + print msg + + +def generateGlyphs(f, glyphNames, glyphList={}): + log(">> Generating diacritics") + glyphnames = [gname for gname in glyphNames if not gname.startswith("#") and gname != ""] + + for glyphName in glyphNames: + generateGlyph(f, glyphName, glyphList) + +def cleanCurves(f): + log(">> Removing overlaps") + for g in f: + removeGlyphOverlap(g) + + # log(">> Mitring sharp corners") + # for g in f: + # mitreGlyph(g, 3., .7) + + # log(">> Converting curves to quadratic") + # for g in f: + # glyphCurvesToQuadratic(g) + + +def deleteGlyphs(f, deleteList): + for name in 
deleteList: + if f.has_key(name): + f.removeGlyph(name) + + +def removeGlyphOverlap(glyph): + """Remove overlaps in contours from a glyph.""" + #TODO(jamesgk) verify overlaps exist first, as per library's recommendation + manager = BooleanOperationManager() + contours = glyph.contours + glyph.clearContours() + manager.union(contours, glyph.getPointPen()) + + +def saveOTF(font, destFile, glyphOrder, truetype=False): + """Save a RoboFab font as an OTF binary using ufo2fdk.""" + + if truetype: + otf = compileTTF(font, featureCompilerClass=RobotoFeatureCompiler, + kernWriter=RobotoKernWriter, glyphOrder=glyphOrder, + convertCubics=False, + useProductionNames=False) + else: + otf = compileOTF(font, featureCompilerClass=RobotoFeatureCompiler, + kernWriter=RobotoKernWriter, glyphOrder=glyphOrder, + useProductionNames=False) + otf.save(destFile) diff --git a/misc/pylib/fontbuild/LICENSE b/misc/pylib/fontbuild/LICENSE new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/misc/pylib/fontbuild/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/misc/pylib/fontbuild/ORIGIN.txt b/misc/pylib/fontbuild/ORIGIN.txt new file mode 100644 index 000000000..1b0a3cf79 --- /dev/null +++ b/misc/pylib/fontbuild/ORIGIN.txt @@ -0,0 +1 @@ +https://github.com/google/roboto/tree/master/scripts/lib/fontbuild diff --git a/misc/pylib/fontbuild/__init__.py b/misc/pylib/fontbuild/__init__.py new file mode 100644 index 000000000..4ed720308 --- /dev/null +++ b/misc/pylib/fontbuild/__init__.py @@ -0,0 +1,6 @@ +""" +fontbuild + +A collection of font production tools written for FontLab +""" +version = "0.1" \ No newline at end of file diff --git a/misc/pylib/fontbuild/alignpoints.py b/misc/pylib/fontbuild/alignpoints.py new file mode 100644 index 000000000..f49f24d95 --- /dev/null +++ b/misc/pylib/fontbuild/alignpoints.py @@ -0,0 +1,173 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import math + +import numpy as np +from numpy.linalg import lstsq + + +def alignCorners(glyph, va, subsegments): + out = va.copy() + # for i,c in enumerate(subsegments): + # segmentCount = len(glyph.contours[i].segments) - 1 + # n = len(c) + # for j,s in enumerate(c): + # if j < segmentCount: + # seg = glyph.contours[i].segments[j] + # if seg.type == "line": + # subIndex = subsegmentIndex(i,j,subsegments) + # out[subIndex] = alignPoints(va[subIndex]) + + for i,c in enumerate(subsegments): + segmentCount = len(glyph.contours[i].segments) + n = len(c) + for j,s in enumerate(c): + if j < segmentCount - 1: + segType = glyph.contours[i].segments[j].type + segnextType = glyph.contours[i].segments[j+1].type + next = j+1 + elif j == segmentCount -1 and s[1] > 3: + segType = glyph.contours[i].segments[j].type + segNextType = "line" + next = j+1 + elif j == segmentCount: + segType = "line" + segnextType = glyph.contours[i].segments[1].type + if glyph.name == "J": + print s[1] + print segnextType + next = 1 + else: + break + if segType == "line" and segnextType == "line": + subIndex = subsegmentIndex(i,j,subsegments) + pts = va[subIndex] + ptsnext = va[subsegmentIndex(i,next,subsegments)] + # out[subIndex[-1]] = (out[subIndex[-1]] - 500) * 3 + 500 #findCorner(pts, ptsnext) + # print subIndex[-1], subIndex, subsegmentIndex(i,next,subsegments) + try: + out[subIndex[-1]] = findCorner(pts, ptsnext) + except: + pass + # print glyph.name, "Can't find corner: parallel lines" + return out + + +def subsegmentIndex(contourIndex, segmentIndex, subsegments): + # This whole thing is so dumb. 
Need a better data model for subsegments + + contourOffset = 0 + for i,c in enumerate(subsegments): + if i == contourIndex: + break + contourOffset += c[-1][0] + n = subsegments[contourIndex][-1][0] + # print contourIndex, contourOffset, n + startIndex = subsegments[contourIndex][segmentIndex-1][0] + segmentCount = subsegments[contourIndex][segmentIndex][1] + endIndex = (startIndex + segmentCount + 1) % (n) + + indices = np.array([(startIndex + i) % (n) + contourOffset for i in range(segmentCount + 1)]) + return indices + + +def alignPoints(pts, start=None, end=None): + if start == None or end == None: + start, end = fitLine(pts) + out = pts.copy() + for i,p in enumerate(pts): + out[i] = nearestPoint(start, end, p) + return out + + +def findCorner(pp, nn): + if len(pp) < 4 or len(nn) < 4: + assert 0, "line too short to fit" + pStart,pEnd = fitLine(pp) + nStart,nEnd = fitLine(nn) + prev = pEnd - pStart + next = nEnd - nStart + # print int(np.arctan2(prev[1],prev[0]) / math.pi * 180), + # print int(np.arctan2(next[1],next[0]) / math.pi * 180) + # if lines are parallel, return simple average of end and start points + if np.dot(prev / np.linalg.norm(prev), + next / np.linalg.norm(next)) > .999999: + # print "parallel lines", np.arctan2(prev[1],prev[0]), np.arctan2(next[1],next[0]) + # print prev, next + assert 0, "parallel lines" + if glyph.name is None: + # Never happens, but here to fix a bug in Python 2.7 with -OO + print '' + return lineIntersect(pStart, pEnd, nStart, nEnd) + + +def lineIntersect((x1,y1),(x2,y2),(x3,y3),(x4,y4)): + x12 = x1 - x2 + x34 = x3 - x4 + y12 = y1 - y2 + y34 = y3 - y4 + + det = x12 * y34 - y12 * x34 + if det == 0: + print "parallel!" + + a = x1 * y2 - y1 * x2 + b = x3 * y4 - y3 * x4 + + x = (a * x34 - b * x12) / det + y = (a * y34 - b * y12) / det + + return (x,y) + + +def fitLineLSQ(pts): + "returns a line fit with least squares. 
Fails for vertical lines" + n = len(pts) + a = np.ones((n,2)) + for i in range(n): + a[i,0] = pts[i,0] + line = lstsq(a,pts[:,1])[0] + return line + + +def fitLine(pts): + """returns a start vector and direction vector + Assumes points segments that already form a somewhat smooth line + """ + n = len(pts) + if n < 1: + return (0,0),(0,0) + a = np.zeros((n-1,2)) + for i in range(n-1): + v = pts[i] - pts[i+1] + a[i] = v / np.linalg.norm(v) + direction = np.mean(a[1:-1], axis=0) + start = np.mean(pts[1:-1], axis=0) + return start, start+direction + + +def nearestPoint(a,b,c): + "nearest point to point c on line a_b" + magnitude = np.linalg.norm(b-a) + if magnitude == 0: + raise Exception, "Line segment cannot be 0 length" + return (b-a) * np.dot((c-a) / magnitude, (b-a) / magnitude) + a + + +# pts = np.array([[1,1],[2,2],[3,3],[4,4]]) +# pts2 = np.array([[1,0],[2,0],[3,0],[4,0]]) +# print alignPoints(pts2, start = pts[0], end = pts[0]+pts[0]) +# # print findCorner(pts,pts2) diff --git a/misc/pylib/fontbuild/anchors.py b/misc/pylib/fontbuild/anchors.py new file mode 100644 index 000000000..a617b2f51 --- /dev/null +++ b/misc/pylib/fontbuild/anchors.py @@ -0,0 +1,77 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def getGlyph(gname, font): + return font[gname] if font.has_key(gname) else None + + +def getComponentByName(f, g, componentName): + for c in g.components: + if c.baseGlyph == componentName: + return c + +def getAnchorByName(g,anchorName): + for a in g.anchors: + if a.name == anchorName: + return a + +def moveMarkAnchors(f, g, anchorName, accentName, dx, dy): + if "top"==anchorName: + anchors = f[accentName].anchors + for anchor in anchors: + if "mkmktop_acc" == anchor.name: + for anc in g.anchors: + if anc.name == "top": + g.removeAnchor(anc) + break + g.appendAnchor("top", (anchor.x + int(dx), anchor.y + int(dy))) + + elif anchorName in ["bottom", "bottomu"]: + anchors = f[accentName].anchors + for anchor in anchors: + if "mkmkbottom_acc" == anchor.name: + for anc in g.anchors: + if anc.name == "bottom": + g.removeAnchor(anc) + break + x = anchor.x + int(dx) + for anc in anchors: + if "top" == anc.name: + x = anc.x + int(dx) + g.appendAnchor("bottom", (x, anchor.y + int(dy))) + + +def alignComponentToAnchor(f,glyphName,baseName,accentName,anchorName): + g = getGlyph(glyphName,f) + base = getGlyph(baseName,f) + accent = getGlyph(accentName,f) + if g == None or base == None or accent == None: + return + a1 = getAnchorByName(base,anchorName) + a2 = getAnchorByName(accent,"_" + anchorName) + if a1 == None or a2 == None: + return + offset = (a1.x - a2.x, a1.y - a2.y) + c = getComponentByName(f, g, accentName) + c.offset = offset + moveMarkAnchors(f, g, anchorName, accentName, offset[0], offset[1]) + + +def alignComponentsToAnchors(f,glyphName,baseName,accentNames): + for a in accentNames: + if len(a) == 1: + continue + alignComponentToAnchor(f,glyphName,baseName,a[0],a[1]) + diff --git a/misc/pylib/fontbuild/convertCurves.py b/misc/pylib/fontbuild/convertCurves.py new file mode 100644 index 000000000..b6efd5ca2 --- /dev/null +++ b/misc/pylib/fontbuild/convertCurves.py @@ -0,0 +1,102 @@ +#! /usr/bin/env python +# +# Copyright 2015 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Converts a cubic bezier curve to a quadratic spline with +exactly two off curve points. + +""" + +import numpy +from numpy import array,cross,dot +from fontTools.misc import bezierTools +from robofab.objects.objectsRF import RSegment + +def replaceSegments(contour, segments): + while len(contour): + contour.removeSegment(0) + for s in segments: + contour.appendSegment(s.type, [(p.x, p.y) for p in s.points], s.smooth) + +def calcIntersect(a,b,c,d): + numpy.seterr(all='raise') + e = b-a + f = d-c + p = array([-e[1], e[0]]) + try: + h = dot((a-c),p) / dot(f,p) + except: + print a,b,c,d + raise + return c + dot(f,h) + +def simpleConvertToQuadratic(p0,p1,p2,p3): + p = [array(i.x,i.y) for i in [p0,p1,p2,p3]] + off = calcIntersect(p[0],p[1],p[2],p[3]) + +# OFFCURVE_VECTOR_CORRECTION = -.015 +OFFCURVE_VECTOR_CORRECTION = 0 + +def convertToQuadratic(p0,p1,p2,p3): + # TODO: test for accuracy and subdivide further if needed + p = [(i.x,i.y) for i in [p0,p1,p2,p3]] + # if p[0][0] == p[1][0] and p[0][0] == p[2][0] and p[0][0] == p[2][0] and p[0][0] == p[3][0]: + # return (p[0],p[1],p[2],p[3]) + # if p[0][1] == p[1][1] and p[0][1] == p[2][1] and p[0][1] == p[2][1] and p[0][1] == p[3][1]: + # return (p[0],p[1],p[2],p[3]) + seg1,seg2 = bezierTools.splitCubicAtT(p[0], p[1], p[2], p[3], .5) + pts1 = [array([i[0], i[1]]) for i in seg1] + pts2 = [array([i[0], i[1]]) for i in seg2] + on1 = seg1[0] + on2 = seg2[3] + try: + off1 = 
calcIntersect(pts1[0], pts1[1], pts1[2], pts1[3]) + off2 = calcIntersect(pts2[0], pts2[1], pts2[2], pts2[3]) + except: + return (p[0],p[1],p[2],p[3]) + off1 = (on1 - off1) * OFFCURVE_VECTOR_CORRECTION + off1 + off2 = (on2 - off2) * OFFCURVE_VECTOR_CORRECTION + off2 + return (on1,off1,off2,on2) + +def cubicSegmentToQuadratic(c,sid): + + segment = c[sid] + if (segment.type != "curve"): + print "Segment type not curve" + return + + #pSegment,junk = getPrevAnchor(c,sid) + pSegment = c[sid-1] #assumes that a curve type will always be proceeded by another point on the same contour + points = convertToQuadratic(pSegment.points[-1],segment.points[0], + segment.points[1],segment.points[2]) + return RSegment( + 'qcurve', [[int(i) for i in p] for p in points[1:]], segment.smooth) + +def glyphCurvesToQuadratic(g): + + for c in g: + segments = [] + for i in range(len(c)): + s = c[i] + if s.type == "curve": + try: + segments.append(cubicSegmentToQuadratic(c, i)) + except Exception: + print g.name, i + raise + else: + segments.append(s) + replaceSegments(c, segments) diff --git a/misc/pylib/fontbuild/curveFitPen.py b/misc/pylib/fontbuild/curveFitPen.py new file mode 100644 index 000000000..f7c0caed9 --- /dev/null +++ b/misc/pylib/fontbuild/curveFitPen.py @@ -0,0 +1,422 @@ +#! /opt/local/bin/pythonw2.7 +# +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +__all__ = ["SubsegmentPen","SubsegmentsToCurvesPen", "segmentGlyph", "fitGlyph"] + + +from fontTools.pens.basePen import BasePen +import numpy as np +from numpy import array as v +from numpy.linalg import norm +from robofab.pens.adapterPens import GuessSmoothPointPen +from robofab.pens.pointPen import BasePointToSegmentPen + + +class SubsegmentsToCurvesPointPen(BasePointToSegmentPen): + def __init__(self, glyph, subsegmentGlyph, subsegments): + BasePointToSegmentPen.__init__(self) + self.glyph = glyph + self.subPen = SubsegmentsToCurvesPen(None, glyph.getPen(), subsegmentGlyph, subsegments) + + def setMatchTangents(self, b): + self.subPen.matchTangents = b + + def _flushContour(self, segments): + # + # adapted from robofab.pens.adapterPens.rfUFOPointPen + # + assert len(segments) >= 1 + # if we only have one point and it has a name, we must have an anchor + first = segments[0] + segmentType, points = first + pt, smooth, name, kwargs = points[0] + if len(segments) == 1 and name != None: + self.glyph.appendAnchor(name, pt) + return + else: + segmentType, points = segments[-1] + movePt, smooth, name, kwargs = points[-1] + if smooth: + # last point is smooth, set pen to start smooth + self.subPen.setLastSmooth(True) + if segmentType == 'line': + del segments[-1] + + self.subPen.moveTo(movePt) + + # do the rest of the segments + for segmentType, points in segments: + isSmooth = True in [smooth for pt, smooth, name, kwargs in points] + pp = [pt for pt, smooth, name, kwargs in points] + if segmentType == "line": + assert len(pp) == 1 + if isSmooth: + self.subPen.smoothLineTo(pp[0]) + else: + self.subPen.lineTo(pp[0]) + elif segmentType == "curve": + assert len(pp) == 3 + if isSmooth: + self.subPen.smoothCurveTo(*pp) + else: + self.subPen.curveTo(*pp) + elif segmentType == "qcurve": + assert 0, "qcurve not supported" + else: + assert 0, "illegal segmentType: %s" % segmentType + self.subPen.closePath() + + def addComponent(self, glyphName, transform): + 
self.subPen.addComponent(glyphName, transform) + + +class SubsegmentsToCurvesPen(BasePen): + def __init__(self, glyphSet, otherPen, subsegmentGlyph, subsegments): + BasePen.__init__(self, None) + self.otherPen = otherPen + self.ssglyph = subsegmentGlyph + self.subsegments = subsegments + self.contourIndex = -1 + self.segmentIndex = -1 + self.lastPoint = (0,0) + self.lastSmooth = False + self.nextSmooth = False + + def setLastSmooth(self, b): + self.lastSmooth = b + + def _moveTo(self, (x, y)): + self.contourIndex += 1 + self.segmentIndex = 0 + self.startPoint = (x,y) + p = self.ssglyph.contours[self.contourIndex][0].points[0] + self.otherPen.moveTo((p.x, p.y)) + self.lastPoint = (x,y) + + def _lineTo(self, (x, y)): + self.segmentIndex += 1 + index = self.subsegments[self.contourIndex][self.segmentIndex][0] + p = self.ssglyph.contours[self.contourIndex][index].points[0] + self.otherPen.lineTo((p.x, p.y)) + self.lastPoint = (x,y) + self.lastSmooth = False + + def smoothLineTo(self, (x, y)): + self.lineTo((x,y)) + self.lastSmooth = True + + def smoothCurveTo(self, (x1, y1), (x2, y2), (x3, y3)): + self.nextSmooth = True + self.curveTo((x1, y1), (x2, y2), (x3, y3)) + self.nextSmooth = False + self.lastSmooth = True + + def _curveToOne(self, (x1, y1), (x2, y2), (x3, y3)): + self.segmentIndex += 1 + c = self.ssglyph.contours[self.contourIndex] + n = len(c) + startIndex = (self.subsegments[self.contourIndex][self.segmentIndex-1][0]) + segmentCount = (self.subsegments[self.contourIndex][self.segmentIndex][1]) + endIndex = (startIndex + segmentCount + 1) % (n) + + indices = [(startIndex + i) % (n) for i in range(segmentCount + 1)] + points = np.array([(c[i].points[0].x, c[i].points[0].y) for i in indices]) + prevPoint = (c[(startIndex - 1)].points[0].x, c[(startIndex - 1)].points[0].y) + nextPoint = (c[(endIndex) % n].points[0].x, c[(endIndex) % n].points[0].y) + prevTangent = prevPoint - points[0] + nextTangent = nextPoint - points[-1] + + tangent1 = points[1] - points[0] + 
tangent3 = points[-2] - points[-1] + prevTangent /= np.linalg.norm(prevTangent) + nextTangent /= np.linalg.norm(nextTangent) + tangent1 /= np.linalg.norm(tangent1) + tangent3 /= np.linalg.norm(tangent3) + + tangent1, junk = self.smoothTangents(tangent1, prevTangent, self.lastSmooth) + tangent3, junk = self.smoothTangents(tangent3, nextTangent, self.nextSmooth) + if self.matchTangents == True: + cp = fitBezier(points, tangent1, tangent3) + cp[1] = norm(cp[1] - cp[0]) * tangent1 / norm(tangent1) + cp[0] + cp[2] = norm(cp[2] - cp[3]) * tangent3 / norm(tangent3) + cp[3] + else: + cp = fitBezier(points) + # if self.ssglyph.name == 'r': + # print "-----------" + # print self.lastSmooth, self.nextSmooth + # print "%i %i : %i %i \n %i %i : %i %i \n %i %i : %i %i"%(x1,y1, cp[1,0], cp[1,1], x2,y2, cp[2,0], cp[2,1], x3,y3, cp[3,0], cp[3,1]) + self.otherPen.curveTo((cp[1,0], cp[1,1]), (cp[2,0], cp[2,1]), (cp[3,0], cp[3,1])) + self.lastPoint = (x3, y3) + self.lastSmooth = False + + def smoothTangents(self,t1,t2,forceSmooth = False): + if forceSmooth or (abs(t1.dot(t2)) > .95 and norm(t1-t2) > 1): + # print t1,t2, + t1 = (t1 - t2) / 2 + t2 = -t1 + # print t1,t2 + return t1 / norm(t1), t2 / norm(t2) + + def _closePath(self): + self.otherPen.closePath() + + def _endPath(self): + self.otherPen.endPath() + + def addComponent(self, glyphName, transformation): + self.otherPen.addComponent(glyphName, transformation) + + +class SubsegmentPointPen(BasePointToSegmentPen): + def __init__(self, glyph, resolution): + BasePointToSegmentPen.__init__(self) + self.glyph = glyph + self.resolution = resolution + self.subPen = SubsegmentPen(None, glyph.getPen()) + + def getSubsegments(self): + return self.subPen.subsegments[:] + + def _flushContour(self, segments): + # + # adapted from robofab.pens.adapterPens.rfUFOPointPen + # + assert len(segments) >= 1 + # if we only have one point and it has a name, we must have an anchor + first = segments[0] + segmentType, points = first + pt, smooth, name, 
kwargs = points[0] + if len(segments) == 1 and name != None: + self.glyph.appendAnchor(name, pt) + return + else: + segmentType, points = segments[-1] + movePt, smooth, name, kwargs = points[-1] + if segmentType == 'line': + del segments[-1] + + self.subPen.moveTo(movePt) + + # do the rest of the segments + for segmentType, points in segments: + points = [pt for pt, smooth, name, kwargs in points] + if segmentType == "line": + assert len(points) == 1 + self.subPen.lineTo(points[0]) + elif segmentType == "curve": + assert len(points) == 3 + self.subPen.curveTo(*points) + elif segmentType == "qcurve": + assert 0, "qcurve not supported" + else: + assert 0, "illegal segmentType: %s" % segmentType + self.subPen.closePath() + + def addComponent(self, glyphName, transform): + self.subPen.addComponent(glyphName, transform) + + +class SubsegmentPen(BasePen): + + def __init__(self, glyphSet, otherPen, resolution=25): + BasePen.__init__(self,glyphSet) + self.resolution = resolution + self.otherPen = otherPen + self.subsegments = [] + self.startContour = (0,0) + self.contourIndex = -1 + + def _moveTo(self, (x, y)): + self.contourIndex += 1 + self.segmentIndex = 0 + self.subsegments.append([]) + self.subsegmentCount = 0 + self.subsegments[self.contourIndex].append([self.subsegmentCount, 0]) + self.startContour = (x,y) + self.lastPoint = (x,y) + self.otherPen.moveTo((x,y)) + + def _lineTo(self, (x, y)): + count = self.stepsForSegment((x,y),self.lastPoint) + if count < 1: + count = 1 + self.subsegmentCount += count + self.subsegments[self.contourIndex].append([self.subsegmentCount, count]) + for i in range(1,count+1): + x1 = self.lastPoint[0] + (x - self.lastPoint[0]) * i/float(count) + y1 = self.lastPoint[1] + (y - self.lastPoint[1]) * i/float(count) + self.otherPen.lineTo((x1,y1)) + self.lastPoint = (x,y) + + def _curveToOne(self, (x1, y1), (x2, y2), (x3, y3)): + count = self.stepsForSegment((x3,y3),self.lastPoint) + if count < 2: + count = 2 + self.subsegmentCount += count + 
self.subsegments[self.contourIndex].append([self.subsegmentCount,count]) + x = self.renderCurve((self.lastPoint[0],x1,x2,x3),count) + y = self.renderCurve((self.lastPoint[1],y1,y2,y3),count) + assert len(x) == count + if (x3 == self.startContour[0] and y3 == self.startContour[1]): + count -= 1 + for i in range(count): + self.otherPen.lineTo((x[i],y[i])) + self.lastPoint = (x3,y3) + + def _closePath(self): + if not (self.lastPoint[0] == self.startContour[0] and self.lastPoint[1] == self.startContour[1]): + self._lineTo(self.startContour) + + # round values used by otherPen (a RoboFab SegmentToPointPen) to decide + # whether to delete duplicate points at start and end of contour + #TODO(jamesgk) figure out why we have to do this hack, then remove it + c = self.otherPen.contour + for i in [0, -1]: + c[i] = [[round(n, 5) for n in c[i][0]]] + list(c[i][1:]) + + self.otherPen.closePath() + + def _endPath(self): + self.otherPen.endPath() + + def addComponent(self, glyphName, transformation): + self.otherPen.addComponent(glyphName, transformation) + + def stepsForSegment(self, p1, p2): + dist = np.linalg.norm(v(p1) - v(p2)) + out = int(dist / self.resolution) + return out + + def renderCurve(self,p,count): + curvePoints = [] + t = 1.0 / float(count) + temp = t * t + + f = p[0] + fd = 3 * (p[1] - p[0]) * t + fdd_per_2 = 3 * (p[0] - 2 * p[1] + p[2]) * temp + fddd_per_2 = 3 * (3 * (p[1] - p[2]) + p[3] - p[0]) * temp * t + + fddd = fddd_per_2 + fddd_per_2 + fdd = fdd_per_2 + fdd_per_2 + fddd_per_6 = fddd_per_2 * (1.0 / 3) + + for i in range(count): + f = f + fd + fdd_per_2 + fddd_per_6 + fd = fd + fdd + fddd_per_2 + fdd = fdd + fddd + fdd_per_2 = fdd_per_2 + fddd_per_2 + curvePoints.append(f) + + return curvePoints + + +def fitBezierSimple(pts): + T = [np.linalg.norm(pts[i]-pts[i-1]) for i in range(1,len(pts))] + tsum = np.sum(T) + T = [0] + T + T = [np.sum(T[0:i+1])/tsum for i in range(len(pts))] + T = [[t**3, t**2, t, 1] for t in T] + T = np.array(T) + M = np.array([[-1, 3, 
-3, 1], + [ 3, -6, 3, 0], + [-3, 3, 0, 0], + [ 1, 0, 0, 0]]) + T = T.dot(M) + T = np.concatenate((T, np.array([[100,0,0,0], [0,0,0,100]]))) + # pts = np.vstack((pts, pts[0] * 100, pts[-1] * 100)) + C = np.linalg.lstsq(T, pts) + return C[0] + + +def subdivideLineSegment(pts): + out = [pts[0]] + for i in range(1, len(pts)): + out.append(pts[i-1] + (pts[i] - pts[i-1]) * .5) + out.append(pts[i]) + return np.array(out) + + +def fitBezier(pts,tangent0=None,tangent3=None): + if len(pts < 4): + pts = subdivideLineSegment(pts) + T = [np.linalg.norm(pts[i]-pts[i-1]) for i in range(1,len(pts))] + tsum = np.sum(T) + T = [0] + T + T = [np.sum(T[0:i+1])/tsum for i in range(len(pts))] + T = [[t**3, t**2, t, 1] for t in T] + T = np.array(T) + M = np.array([[-1, 3, -3, 1], + [ 3, -6, 3, 0], + [-3, 3, 0, 0], + [ 1, 0, 0, 0]]) + T = T.dot(M) + n = len(pts) + pout = pts.copy() + pout[:,0] -= (T[:,0] * pts[0,0]) + (T[:,3] * pts[-1,0]) + pout[:,1] -= (T[:,0] * pts[0,1]) + (T[:,3] * pts[-1,1]) + + TT = np.zeros((n*2,4)) + for i in range(n): + for j in range(2): + TT[i*2,j*2] = T[i,j+1] + TT[i*2+1,j*2+1] = T[i,j+1] + pout = pout.reshape((n*2,1),order="C") + + if tangent0 != None and tangent3 != None: + tangentConstraintsT = np.array([ + [tangent0[1], -tangent0[0], 0, 0], + [0, 0, tangent3[1], -tangent3[0]] + ]) + tangentConstraintsP = np.array([ + [pts[0][1] * -tangent0[0] + pts[0][0] * tangent0[1]], + [pts[-1][1] * -tangent3[0] + pts[-1][0] * tangent3[1]] + ]) + TT = np.concatenate((TT, tangentConstraintsT * 1000)) + pout = np.concatenate((pout, tangentConstraintsP * 1000)) + C = np.linalg.lstsq(TT,pout)[0].reshape((2,2)) + return np.array([pts[0], C[0], C[1], pts[-1]]) + + +def segmentGlyph(glyph,resolution=50): + g1 = glyph.copy() + g1.clear() + dp = SubsegmentPointPen(g1, resolution) + glyph.drawPoints(dp) + return g1, dp.getSubsegments() + + +def fitGlyph(glyph, subsegmentGlyph, subsegmentIndices, matchTangents=True): + outGlyph = glyph.copy() + outGlyph.clear() + fitPen = 
SubsegmentsToCurvesPointPen(outGlyph, subsegmentGlyph, subsegmentIndices) + fitPen.setMatchTangents(matchTangents) + # smoothPen = GuessSmoothPointPen(fitPen) + glyph.drawPoints(fitPen) + outGlyph.width = subsegmentGlyph.width + return outGlyph + + +if __name__ == '__main__': + p = SubsegmentPen(None, None) + pts = np.array([ + [0,0], + [.5,.5], + [.5,.5], + [1,1] + ]) + print np.array(p.renderCurve(pts,10)) * 10 diff --git a/misc/pylib/fontbuild/decomposeGlyph.py b/misc/pylib/fontbuild/decomposeGlyph.py new file mode 100644 index 000000000..0470fa60b --- /dev/null +++ b/misc/pylib/fontbuild/decomposeGlyph.py @@ -0,0 +1,23 @@ +def decomposeGlyph(font, glyph): + """Moves the components of a glyph to its outline.""" + if len(glyph.components): + deepCopyContours(font, glyph, glyph, (0, 0), (1, 1)) + glyph.clearComponents() + + +def deepCopyContours(font, parent, component, offset, scale): + """Copy contours to parent from component, including nested components.""" + + for nested in component.components: + deepCopyContours( + font, parent, font[nested.baseGlyph], + (offset[0] + nested.offset[0], offset[1] + nested.offset[1]), + (scale[0] * nested.scale[0], scale[1] * nested.scale[1])) + + if component == parent: + return + for contour in component: + contour = contour.copy() + contour.scale(scale) + contour.move(offset) + parent.appendContour(contour) diff --git a/misc/pylib/fontbuild/features.py b/misc/pylib/fontbuild/features.py new file mode 100755 index 000000000..fe6eca012 --- /dev/null +++ b/misc/pylib/fontbuild/features.py @@ -0,0 +1,189 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import re + +from feaTools import parser +from feaTools.writers.fdkSyntaxWriter import FDKSyntaxFeatureWriter + + +class FilterFeatureWriter(FDKSyntaxFeatureWriter): + """Feature writer to detect invalid references and duplicate definitions.""" + + def __init__(self, refs=set(), name=None, isFeature=False): + """Initializes the set of known references, empty by default.""" + self.refs = refs + self.featureNames = set() + self.lookupNames = set() + self.tableNames = set() + self.languageSystems = set() + super(FilterFeatureWriter, self).__init__( + name=name, isFeature=isFeature) + + # error to print when undefined reference is found in glyph class + self.classErr = ('Undefined reference "%s" removed from glyph class ' + 'definition %s.') + + # error to print when undefined reference is found in sub or pos rule + subErr = ['Substitution rule with undefined reference "%s" removed'] + if self._name: + subErr.append(" from ") + subErr.append("feature" if self._isFeature else "lookup") + subErr.append(' "%s"' % self._name) + subErr.append(".") + self.subErr = "".join(subErr) + self.posErr = self.subErr.replace("Substitution", "Positioning") + + def _subwriter(self, name, isFeature): + """Use this class for nested expressions e.g. 
in feature definitions.""" + return FilterFeatureWriter(self.refs, name, isFeature) + + def _flattenRefs(self, refs, flatRefs): + """Flatten a list of references.""" + for ref in refs: + if type(ref) == list: + self._flattenRefs(ref, flatRefs) + elif ref != "'": # ignore contextual class markings + flatRefs.append(ref) + + def _checkRefs(self, refs, errorMsg): + """Check a list of references found in a sub or pos rule.""" + flatRefs = [] + self._flattenRefs(refs, flatRefs) + for ref in flatRefs: + # trailing apostrophes should be ignored + if ref[-1] == "'": + ref = ref[:-1] + if ref not in self.refs: + print errorMsg % ref + # insert an empty instruction so that we can't end up with an + # empty block, which is illegal syntax + super(FilterFeatureWriter, self).rawText(";") + return False + return True + + def classDefinition(self, name, contents): + """Check that contents are valid, then add name to known references.""" + if name in self.refs: + return + newContents = [] + for ref in contents: + if ref not in self.refs and ref != "-": + print self.classErr % (ref, name) + else: + newContents.append(ref) + self.refs.add(name) + super(FilterFeatureWriter, self).classDefinition(name, newContents) + + def gsubType1(self, target, replacement): + """Check a sub rule with one-to-one replacement.""" + if self._checkRefs([target, replacement], self.subErr): + super(FilterFeatureWriter, self).gsubType1(target, replacement) + + def gsubType4(self, target, replacement): + """Check a sub rule with many-to-one replacement.""" + if self._checkRefs([target, replacement], self.subErr): + super(FilterFeatureWriter, self).gsubType4(target, replacement) + + def gsubType6(self, precedingContext, target, trailingContext, replacement): + """Check a sub rule with contextual replacement.""" + refs = [precedingContext, target, trailingContext, replacement] + if self._checkRefs(refs, self.subErr): + super(FilterFeatureWriter, self).gsubType6( + precedingContext, target, trailingContext, 
replacement) + + def gposType1(self, target, value): + """Check a single positioning rule.""" + if self._checkRefs([target], self.posErr): + super(FilterFeatureWriter, self).gposType1(target, value) + + def gposType2(self, target, value, needEnum=False): + """Check a pair positioning rule.""" + if self._checkRefs(target, self.posErr): + super(FilterFeatureWriter, self).gposType2(target, value, needEnum) + + # these rules may contain references, but they aren't present in Roboto + def gsubType3(self, target, replacement): + raise NotImplementedError + + def feature(self, name): + """Adds a feature definition only once.""" + if name not in self.featureNames: + self.featureNames.add(name) + return super(FilterFeatureWriter, self).feature(name) + # we must return a new writer even if we don't add it to this one + return FDKSyntaxFeatureWriter(name, True) + + def lookup(self, name): + """Adds a lookup block only once.""" + if name not in self.lookupNames: + self.lookupNames.add(name) + return super(FilterFeatureWriter, self).lookup(name) + # we must return a new writer even if we don't add it to this one + return FDKSyntaxFeatureWriter(name, False) + + def languageSystem(self, langTag, scriptTag): + """Adds a language system instruction only once.""" + system = (langTag, scriptTag) + if system not in self.languageSystems: + self.languageSystems.add(system) + super(FilterFeatureWriter, self).languageSystem(langTag, scriptTag) + + def table(self, name, data): + """Adds a table only once.""" + if name in self.tableNames: + return + self.tableNames.add(name) + self._instructions.append("table %s {" % name) + self._instructions.extend([" %s %s;" % line for line in data]) + self._instructions.append("} %s;" % name) + + +def compileFeatureRE(name): + """Compiles a feature-matching regex.""" + + # this is the pattern used internally by feaTools: + # https://github.com/typesupply/feaTools/blob/master/Lib/feaTools/parser.py + featureRE = list(parser.featureContentRE) + 
featureRE.insert(2, name) + featureRE.insert(6, name) + return re.compile("".join(featureRE)) + + +def updateFeature(font, name, value): + """Add a feature definition, or replace existing one.""" + featureRE = compileFeatureRE(name) + if featureRE.search(font.features.text): + font.features.text = featureRE.sub(value, font.features.text) + else: + font.features.text += "\n" + value + + +def readFeatureFile(font, text, prepend=True): + """Incorporate valid definitions from feature text into font.""" + writer = FilterFeatureWriter(set(font.keys())) + if prepend: + text += font.features.text + else: + text = font.features.text + text + parser.parseFeatures(writer, text) + font.features.text = writer.write() + + +def writeFeatureFile(font, path): + """Write the font's features to an external file.""" + fout = open(path, "w") + fout.write(font.features.text) + fout.close() diff --git a/misc/pylib/fontbuild/generateGlyph.py b/misc/pylib/fontbuild/generateGlyph.py new file mode 100644 index 000000000..465f940a9 --- /dev/null +++ b/misc/pylib/fontbuild/generateGlyph.py @@ -0,0 +1,97 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import re +from string import find + +from anchors import alignComponentsToAnchors, getAnchorByName + + +def parseComposite(composite): + c = composite.split("=") + d = c[1].split("/") + glyphName = d[0] + if len(d) == 1: + offset = [0, 0] + else: + offset = [int(i) for i in d[1].split(",")] + accentString = c[0] + accents = accentString.split("+") + baseName = accents.pop(0) + accentNames = [i.split(":") for i in accents] + return (glyphName, baseName, accentNames, offset) + + +def copyMarkAnchors(f, g, srcname, width): + for anchor in f[srcname].anchors: + if anchor.name in ("top_dd", "bottom_dd", "top0315"): + g.appendAnchor(anchor.name, (anchor.x + width, anchor.y)) + + if ("top" == anchor.name and + not any(a.name == "parent_top" for a in g.anchors)): + g.appendAnchor("parent_top", anchor.position) + + if ("bottom" == anchor.name and + not any(a.name == "bottom" for a in g.anchors)): + g.appendAnchor("bottom", anchor.position) + + if any(a.name == "top" for a in g.anchors): + return + + anchor_parent_top = getAnchorByName(g, "parent_top") + if anchor_parent_top is not None: + g.appendAnchor("top", anchor_parent_top.position) + + +def generateGlyph(f,gname,glyphList={}): + glyphName, baseName, accentNames, offset = parseComposite(gname) + if f.has_key(glyphName): + print('Existing glyph "%s" found in font, ignoring composition rule ' + '"%s"' % (glyphName, gname)) + return + + if baseName.find("_") != -1: + g = f.newGlyph(glyphName) + for componentName in baseName.split("_"): + g.appendComponent(componentName, (g.width, 0)) + g.width += f[componentName].width + setUnicodeValue(g, glyphList) + + else: + try: + f.compileGlyph(glyphName, baseName, accentNames) + except KeyError as e: + print('KeyError raised for composition rule "%s", likely "%s" ' + 'anchor not found in glyph "%s"' % (gname, e, baseName)) + return + g = f[glyphName] + setUnicodeValue(g, glyphList) + copyMarkAnchors(f, g, baseName, offset[1] + offset[0]) + if len(accentNames) > 0: + 
alignComponentsToAnchors(f, glyphName, baseName, accentNames) + if offset[0] != 0 or offset[1] != 0: + g.width += offset[1] + offset[0] + g.move((offset[0], 0), anchors=False) + + +def setUnicodeValue(glyph, glyphList): + """Try to ensure glyph has a unicode value -- used by FDK to make OTFs.""" + + if glyph.name in glyphList: + glyph.unicode = int(glyphList[glyph.name], 16) + else: + uvNameMatch = re.match("uni([\dA-F]{4})$", glyph.name) + if uvNameMatch: + glyph.unicode = int(uvNameMatch.group(1), 16) diff --git a/misc/pylib/fontbuild/instanceNames.py b/misc/pylib/fontbuild/instanceNames.py new file mode 100644 index 000000000..cf87ba719 --- /dev/null +++ b/misc/pylib/fontbuild/instanceNames.py @@ -0,0 +1,232 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from datetime import date +import re +from random import randint +import string + +class InstanceNames: + "Class that allows easy setting of FontLab name fields. 
TODO: Add proper italic flags" + + foundry = "" + foundryURL = "" + copyrightHolderName = "" + build = "" + version = "1.0" + year = date.today().year + designer = "" + designerURL = "" + license = "" + licenseURL = "" + + def __init__(self,names): + if type(names) == type(" "): + names = names.split("/") + #print names + self.longfamily = names[0] + self.longstyle = names[1] + self.shortstyle = names[2] + self.subfamilyAbbrev = names[3] + + self.width = self._getWidth() + self.italic = self._getItalic() + self.weight = self._getWeight() + self.fullname = "%s %s" %(self.longfamily, self.longstyle) + self.postscript = re.sub(' ','', self.longfamily) + "-" + re.sub(' ','',self.longstyle) + + if self.subfamilyAbbrev != "" and self.subfamilyAbbrev != None and self.subfamilyAbbrev != "Rg": + self.shortfamily = "%s %s" %(self.longfamily, self.longstyle.split()[0]) + else: + self.shortfamily = self.longfamily + + def setRFNames(self,f, version=1, versionMinor=0): + f.info.familyName = self.longfamily + f.info.styleName = self.longstyle + f.info.styleMapFamilyName = self.shortfamily + f.info.styleMapStyleName = self.shortstyle.lower() + f.info.versionMajor = version + f.info.versionMinor = versionMinor + f.info.year = self.year + if len(self.copyrightHolderName) > 0: + f.info.copyright = "Copyright %s %s" % (self.year, self.copyrightHolderName) + f.info.trademark = "%s is a trademark of %s." 
%(self.longfamily, self.foundry.rstrip('.')) + + if len(self.designer) > 0: + f.info.openTypeNameDesigner = self.designer + if len(self.designerURL) > 0: + f.info.openTypeNameDesignerURL = self.designerURL + f.info.openTypeNameManufacturer = self.foundry + f.info.openTypeNameManufacturerURL = self.foundryURL + f.info.openTypeNameLicense = self.license + f.info.openTypeNameLicenseURL = self.licenseURL + f.info.openTypeNameVersion = "Version %i.%i" %(version, versionMinor) + + if self.build is not None and len(self.build): + f.info.openTypeNameUniqueID = "%s:%s:%s" %(self.fullname, self.build, self.year) + else: + f.info.openTypeNameUniqueID = "%s:%s" %(self.fullname, self.year) + + # f.info.openTypeNameDescription = "" + # f.info.openTypeNameCompatibleFullName = "" + # f.info.openTypeNameSampleText = "" + if (self.subfamilyAbbrev != "Rg"): + f.info.openTypeNamePreferredFamilyName = self.longfamily + f.info.openTypeNamePreferredSubfamilyName = self.longstyle + + f.info.openTypeOS2WeightClass = self._getWeightCode(self.weight) + f.info.macintoshFONDName = re.sub(' ','',self.longfamily) + " " + re.sub(' ','',self.longstyle) + f.info.postscriptFontName = f.info.macintoshFONDName.replace(" ", "-") + if self.italic: + f.info.italicAngle = -12.0 + + + def setFLNames(self,flFont): + + from FL import NameRecord + + flFont.family_name = self.shortfamily + flFont.mac_compatible = self.fullname + flFont.style_name = self.longstyle + flFont.full_name = self.fullname + flFont.font_name = self.postscript + flFont.font_style = self._getStyleCode() + flFont.menu_name = self.shortfamily + flFont.apple_name = re.sub(' ','',self.longfamily) + " " + re.sub(' ','',self.longstyle) + flFont.fond_id = randint(1000,9999) + flFont.pref_family_name = self.longfamily + flFont.pref_style_name = self.longstyle + flFont.weight = self.weight + flFont.weight_code = self._getWeightCode(self.weight) + flFont.width = self.width + if len(self.italic): + flFont.italic_angle = -12 + + fn = 
flFont.fontnames + fn.clean() + #fn.append(NameRecord(0,1,0,0, "Font data copyright %s %s" %(self.foundry, self.year) )) + #fn.append(NameRecord(0,3,1,1033, "Font data copyright %s %s" %(self.foundry, self.year) )) + copyrightHolderName = self.copyrightHolderName if len(self.copyrightHolderName) > 0 else self.foundry + fn.append(NameRecord(0,1,0,0, "Copyright %s %s" %(self.year, copyrightHolderName) )) + fn.append(NameRecord(0,3,1,1033, "Copyright %s %s" %(self.year, copyrightHolderName) )) + fn.append(NameRecord(1,1,0,0, self.longfamily )) + fn.append(NameRecord(1,3,1,1033, self.shortfamily )) + fn.append(NameRecord(2,1,0,0, self.longstyle )) + fn.append(NameRecord(2,3,1,1033, self.longstyle )) + #fn.append(NameRecord(3,1,0,0, "%s:%s:%s" %(self.foundry, self.longfamily, self.year) )) + #fn.append(NameRecord(3,3,1,1033, "%s:%s:%s" %(self.foundry, self.longfamily, self.year) )) + fn.append(NameRecord(3,1,0,0, "%s:%s:%s" %(self.foundry, self.fullname, self.year) )) + fn.append(NameRecord(3,3,1,1033, "%s:%s:%s" %(self.foundry, self.fullname, self.year) )) + fn.append(NameRecord(4,1,0,0, self.fullname )) + fn.append(NameRecord(4,3,1,1033, self.fullname )) + if len(self.build) > 0: + fn.append(NameRecord(5,1,0,0, "Version %s%s; %s" %(self.version, self.build, self.year) )) + fn.append(NameRecord(5,3,1,1033, "Version %s%s; %s" %(self.version, self.build, self.year) )) + else: + fn.append(NameRecord(5,1,0,0, "Version %s; %s" %(self.version, self.year) )) + fn.append(NameRecord(5,3,1,1033, "Version %s; %s" %(self.version, self.year) )) + fn.append(NameRecord(6,1,0,0, self.postscript )) + fn.append(NameRecord(6,3,1,1033, self.postscript )) + fn.append(NameRecord(7,1,0,0, "%s is a trademark of %s." %(self.longfamily, self.foundry) )) + fn.append(NameRecord(7,3,1,1033, "%s is a trademark of %s." 
%(self.longfamily, self.foundry) )) + fn.append(NameRecord(9,1,0,0, self.foundry )) + fn.append(NameRecord(9,3,1,1033, self.foundry )) + fn.append(NameRecord(11,1,0,0, self.foundryURL )) + fn.append(NameRecord(11,3,1,1033, self.foundryURL )) + fn.append(NameRecord(12,1,0,0, self.designer )) + fn.append(NameRecord(12,3,1,1033, self.designer )) + fn.append(NameRecord(13,1,0,0, self.license )) + fn.append(NameRecord(13,3,1,1033, self.license )) + fn.append(NameRecord(14,1,0,0, self.licenseURL )) + fn.append(NameRecord(14,3,1,1033, self.licenseURL )) + if (self.subfamilyAbbrev != "Rg"): + fn.append(NameRecord(16,3,1,1033, self.longfamily )) + fn.append(NameRecord(17,3,1,1033, self.longstyle)) + #else: + #fn.append(NameRecord(17,3,1,1033,"")) + #fn.append(NameRecord(18,1,0,0, re.sub("Italic","It", self.fullname))) + + def _getSubstyle(self, regex): + substyle = re.findall(regex, self.longstyle) + if len(substyle) > 0: + return substyle[0] + else: + return "" + + def _getItalic(self): + return self._getSubstyle(r"Italic|Oblique|Obliq") + + def _getWeight(self): + w = self._getSubstyle(r"Extrabold|Superbold|Super|Fat|Black|Bold|Semibold|Demibold|Medium|Light|Thin") + if w == "": + w = "Regular" + return w + + def _getWidth(self): + w = self._getSubstyle(r"Condensed|Extended|Narrow|Wide") + if w == "": + w = "Normal" + return w + + def _getStyleCode(self): + #print "shortstyle:", self.shortstyle + styleCode = 0 + if self.shortstyle == "Bold": + styleCode = 32 + if self.shortstyle == "Italic": + styleCode = 1 + if self.shortstyle == "Bold Italic": + styleCode = 33 + if self.longstyle == "Regular": + styleCode = 64 + return styleCode + + def _getWeightCode(self,weight): + if weight == "Thin": + return 250 + elif weight == "Light": + return 300 + elif weight == "Bold": + return 700 + elif weight == "Medium": + return 500 + elif weight == "Semibold": + return 600 + elif weight == "Black": + return 900 + elif weight == "Fat": + return 900 + + return 400 + +def 
setNames(f,names,foundry="",version="1.0",build=""): + InstanceNames.foundry = foundry + InstanceNames.version = version + InstanceNames.build = build + i = InstanceNames(names) + i.setFLNames(f) + + +def setInfoRF(f, names, attrs={}): + i = InstanceNames(names) + version, versionMinor = (1, 0) + for k,v in attrs.iteritems(): + if k == 'version': + if v.find('.') != -1: + version, versionMinor = [int(num) for num in v.split(".")] + else: + version = int(v) + setattr(i, k, v) + i.setRFNames(f, version=version, versionMinor=versionMinor) diff --git a/misc/pylib/fontbuild/italics.py b/misc/pylib/fontbuild/italics.py new file mode 100644 index 000000000..91e658c74 --- /dev/null +++ b/misc/pylib/fontbuild/italics.py @@ -0,0 +1,308 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import math + +from fontTools.misc.transform import Transform +import numpy as np +from numpy.linalg import norm +from scipy.sparse.linalg import cg +from scipy.ndimage.filters import gaussian_filter1d as gaussian +from scipy.cluster.vq import vq, whiten + +from fontbuild.alignpoints import alignCorners +from fontbuild.curveFitPen import fitGlyph, segmentGlyph + + +def italicizeGlyph(f, g, angle=10, stemWidth=185, meanYCenter=-825, narrowAmount=1): + unic = g.unicode #save unicode + + glyph = f[g.name] + slope = np.tanh(math.pi * angle / 180) + + # determine how far on the x axis the glyph should slide + # to compensate for the slant. 
+ # meanYCenter: + # -600 is a magic number that assumes a 2048 unit em square, + # and -825 for a 2816 unit em square. (UPM*0.29296875) + m = Transform(1, 0, slope, 1, 0, 0) + xoffset, junk = m.transformPoint((0, meanYCenter)) + m = Transform(narrowAmount, 0, slope, 1, xoffset, 0) + + if len(glyph) > 0: + g2 = italicize(f[g.name], angle, xoffset=xoffset, stemWidth=stemWidth) + f.insertGlyph(g2, g.name) + + transformFLGlyphMembers(f[g.name], m) + + if unic > 0xFFFF: #restore unicode + g.unicode = unic + + +def italicize(glyph, angle=12, stemWidth=180, xoffset=-50): + CURVE_CORRECTION_WEIGHT = .03 + CORNER_WEIGHT = 10 + + # decompose the glyph into smaller segments + ga, subsegments = segmentGlyph(glyph,25) + va, e = glyphToMesh(ga) + n = len(va) + grad = mapEdges(lambda a,(p,n): normalize(p-a), va, e) + cornerWeights = mapEdges(lambda a,(p,n): normalize(p-a).dot(normalize(a-n)), grad, e)[:,0].reshape((-1,1)) + smooth = np.ones((n,1)) * CURVE_CORRECTION_WEIGHT + + controlPoints = findControlPointsInMesh(glyph, va, subsegments) + smooth[controlPoints > 0] = 1 + smooth[cornerWeights < .6] = CORNER_WEIGHT + # smooth[cornerWeights >= .9999] = 1 + + out = va.copy() + hascurves = False + for c in glyph.contours: + for s in c.segments: + if s.type == "curve": + hascurves = True + break + if hascurves: + break + if stemWidth > 100: + outCorrected = skewMesh(recompose(skewMesh(out, angle * 1.6), grad, e, smooth=smooth), -angle * 1.6) + # out = copyMeshDetails(va, out, e, 6) + else: + outCorrected = out + + # create a transform for italicizing + normals = edgeNormals(out, e) + center = va + normals * stemWidth * .4 + if stemWidth > 130: + center[:, 0] = va[:, 0] * .7 + center[:,0] * .3 + centerSkew = skewMesh(center.dot(np.array([[.97,0],[0,1]])), angle * .9) + + # apply the transform + out = outCorrected + (centerSkew - center) + out[:,1] = outCorrected[:,1] + + # make some corrections + smooth = np.ones((n,1)) * .1 + out = alignCorners(glyph, out, subsegments) + out = 
copyMeshDetails(skewMesh(va, angle), out, e, 7, smooth=smooth) + # grad = mapEdges(lambda a,(p,n): normalize(p-a), skewMesh(outCorrected, angle*.9), e) + # out = recompose(out, grad, e, smooth=smooth) + + out = skewMesh(out, angle * .1) + out[:,0] += xoffset + # out[:,1] = outCorrected[:,1] + out[va[:,1] == 0, 1] = 0 + gOut = meshToGlyph(out, ga) + # gOut.width *= .97 + # gOut.width += 10 + # return gOut + + # recompose the glyph into original segments + return fitGlyph(glyph, gOut, subsegments) + + +def transformFLGlyphMembers(g, m, transformAnchors = True): + # g.transform(m) + g.width = g.width * m[0] + p = m.transformPoint((0,0)) + for c in g.components: + d = m.transformPoint(c.offset) + c.offset = (d[0] - p[0], d[1] - p[1]) + if transformAnchors: + for a in g.anchors: + aa = m.transformPoint((a.x,a.y)) + a.x = aa[0] + # a.x,a.y = (aa[0] - p[0], aa[1] - p[1]) + # a.x = a.x - m[4] + + +def glyphToMesh(g): + points = [] + edges = {} + offset = 0 + for c in g.contours: + if len(c) < 2: + continue + for i,prev,next in rangePrevNext(len(c)): + points.append((c[i].points[0].x, c[i].points[0].y)) + edges[i + offset] = np.array([prev + offset, next + offset], dtype=int) + offset += len(c) + return np.array(points), edges + + +def meshToGlyph(points, g): + g1 = g.copy() + j = 0 + for c in g1.contours: + if len(c) < 2: + continue + for i in range(len(c)): + c[i].points[0].x = points[j][0] + c[i].points[0].y = points[j][1] + j += 1 + return g1 + + +def quantizeGradient(grad, book=None): + if book == None: + book = np.array([(1,0),(0,1),(0,-1),(-1,0)]) + indexArray = vq(whiten(grad), book)[0] + out = book[indexArray] + for i,v in enumerate(out): + out[i] = normalize(v) + return out + + +def findControlPointsInMesh(glyph, va, subsegments): + controlPointIndices = np.zeros((len(va),1)) + index = 0 + for i,c in enumerate(subsegments): + segmentCount = len(glyph.contours[i].segments) - 1 + for j,s in enumerate(c): + if j < segmentCount: + if glyph.contours[i].segments[j].type 
== "line": + controlPointIndices[index] = 1 + index += s[1] + return controlPointIndices + + +def recompose(v, grad, e, smooth=1, P=None, distance=None): + n = len(v) + if distance == None: + distance = mapEdges(lambda a,(p,n): norm(p - a), v, e) + if (P == None): + P = mP(v,e) + P += np.identity(n) * smooth + f = v.copy() + for i,(prev,next) in e.iteritems(): + f[i] = (grad[next] * distance[next] - grad[i] * distance[i]) + out = v.copy() + f += v * smooth + for i in range(len(out[0,:])): + out[:,i] = cg(P, f[:,i])[0] + return out + + +def mP(v,e): + n = len(v) + M = np.zeros((n,n)) + for i, edges in e.iteritems(): + w = -2 / float(len(edges)) + for index in edges: + M[i,index] = w + M[i,i] = 2 + return M + + +def normalize(v): + n = np.linalg.norm(v) + if n == 0: + return v + return v/n + + +def mapEdges(func,v,e,*args): + b = v.copy() + for i, edges in e.iteritems(): + b[i] = func(v[i], [v[j] for j in edges], *args) + return b + + +def getNormal(a,b,c): + "Assumes TT winding direction" + p = np.roll(normalize(b - a), 1) + n = -np.roll(normalize(c - a), 1) + p[1] *= -1 + n[1] *= -1 + # print p, n, normalize((p + n) * .5) + return normalize((p + n) * .5) + + +def edgeNormals(v,e): + "Assumes a mesh where each vertex has exactly least two edges" + return mapEdges(lambda a,(p,n) : getNormal(a,p,n),v,e) + + +def rangePrevNext(count): + c = np.arange(count,dtype=int) + r = np.vstack((c, np.roll(c, 1), np.roll(c, -1))) + return r.T + + +def skewMesh(v,angle): + slope = np.tanh([math.pi * angle / 180]) + return v.dot(np.array([[1,0],[slope,1]])) + + +def labelConnected(e): + label = 0 + labels = np.zeros((len(e),1)) + for i,(prev,next) in e.iteritems(): + labels[i] = label + if next <= i: + label += 1 + return labels + + +def copyGradDetails(a,b,e,scale=15): + n = len(a) + labels = labelConnected(e) + out = a.astype(float).copy() + for i in range(labels[-1]+1): + mask = (labels==i).flatten() + out[mask,:] = gaussian(b[mask,:], scale, mode="wrap", axis=0) + a[mask,:] - 
gaussian(a[mask,:], scale, mode="wrap", axis=0) + return out + + +def copyMeshDetails(va,vb,e,scale=5,smooth=.01): + gradA = mapEdges(lambda a,(p,n): normalize(p-a), va, e) + gradB = mapEdges(lambda a,(p,n): normalize(p-a), vb, e) + grad = copyGradDetails(gradA, gradB, e, scale) + grad = mapEdges(lambda a,(p,n): normalize(a), grad, e) + return recompose(vb, grad, e, smooth=smooth) + + +def condenseGlyph(glyph, scale=.8, stemWidth=185): + ga, subsegments = segmentGlyph(glyph, 25) + va, e = glyphToMesh(ga) + n = len(va) + + normals = edgeNormals(va,e) + cn = va.dot(np.array([[scale, 0],[0,1]])) + grad = mapEdges(lambda a,(p,n): normalize(p-a), cn, e) + # ograd = mapEdges(lambda a,(p,n): normalize(p-a), va, e) + + cn[:,0] -= normals[:,0] * stemWidth * .5 * (1 - scale) + out = recompose(cn, grad, e, smooth=.5) + # out = recompose(out, grad, e, smooth=.1) + out = recompose(out, grad, e, smooth=.01) + + # cornerWeights = mapEdges(lambda a,(p,n): normalize(p-a).dot(normalize(a-n)), grad, e)[:,0].reshape((-1,1)) + # smooth = np.ones((n,1)) * .1 + # smooth[cornerWeights < .6] = 10 + # + # grad2 = quantizeGradient(grad).astype(float) + # grad2 = copyGradDetails(grad, grad2, e, scale=10) + # grad2 = mapEdges(lambda a,e: normalize(a), grad2, e) + # out = recompose(out, grad2, e, smooth=smooth) + out[:,0] += 15 + out[:,1] = va[:,1] + # out = recompose(out, grad, e, smooth=.5) + gOut = meshToGlyph(out, ga) + gOut = fitGlyph(glyph, gOut, subsegments) + for i,seg in enumerate(gOut): + gOut[i].points[0].y = glyph[i].points[0].y + return gOut diff --git a/misc/pylib/fontbuild/markFeature.py b/misc/pylib/fontbuild/markFeature.py new file mode 100755 index 000000000..42cafe4c7 --- /dev/null +++ b/misc/pylib/fontbuild/markFeature.py @@ -0,0 +1,55 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from ufo2ft.kernFeatureWriter import KernFeatureWriter +from ufo2ft.makeotfParts import FeatureOTFCompiler + + +class RobotoFeatureCompiler(FeatureOTFCompiler): + def precompile(self): + self.overwriteFeatures = True + + def setupAnchorPairs(self): + self.anchorPairs = [ + ["top", "_marktop"], + ["bottom", "_markbottom"], + ["top_dd", "_marktop_dd"], + ["bottom_dd", "_markbottom_dd"], + ["rhotichook", "_markrhotichook"], + ["top0315", "_marktop0315"], + ["parent_top", "_markparent_top"], + ["parenthesses.w1", "_markparenthesses.w1"], + ["parenthesses.w2", "_markparenthesses.w2"], + ["parenthesses.w3", "_markparenthesses.w3"]] + + self.mkmkAnchorPairs = [ + ["mkmktop", "_marktop"], + ["mkmkbottom_acc", "_markbottom"], + + # By providing a pair with accent anchor _bottom and no base anchor, + # we designate all glyphs with _bottom as accents (so that they will + # be used as base glyphs for mkmk features) without generating any + # positioning rules actually using this anchor (which is instead + # used to generate composite glyphs). This is all for consistency + # with older roboto versions. + ["", "_bottom"], + ] + + self.ligaAnchorPairs = [] + + +class RobotoKernWriter(KernFeatureWriter): + leftFeaClassRe = r"@_(.+)_L$" + rightFeaClassRe = r"@_(.+)_R$" diff --git a/misc/pylib/fontbuild/mitreGlyph.py b/misc/pylib/fontbuild/mitreGlyph.py new file mode 100644 index 000000000..d0834ed84 --- /dev/null +++ b/misc/pylib/fontbuild/mitreGlyph.py @@ -0,0 +1,111 @@ +# Copyright 2015 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +"""Mitre Glyph: + +mitreSize : Length of the segment created by the mitre. The default is 4. +maxAngle : Maximum angle in radians at which segments will be mitred. The default is .9 (about 50 degrees). + Works for both inside and outside angles + +""" + +import math +from robofab.objects.objectsRF import RPoint, RSegment +from fontbuild.convertCurves import replaceSegments + +def getTangents(contours): + tmap = [] + for c in contours: + clen = len(c) + for i in range(clen): + s = c[i] + p = s.points[-1] + ns = c[(i + 1) % clen] + ps = c[(clen + i - 1) % clen] + np = ns.points[1] if ns.type == 'curve' else ns.points[-1] + pp = s.points[2] if s.type == 'curve' else ps.points[-1] + tmap.append((pp - p, np - p)) + return tmap + +def normalizeVector(p): + m = getMagnitude(p); + if m != 0: + return p*(1/m) + else: + return RPoint(0,0) + +def getMagnitude(p): + return math.sqrt(p.x*p.x + p.y*p.y) + +def getDistance(v1,v2): + return getMagnitude(RPoint(v1.x - v2.x, v1.y - v2.y)) + +def getAngle(v1,v2): + angle = math.atan2(v1.y,v1.x) - math.atan2(v2.y,v2.x) + return (angle + (2*math.pi)) % (2*math.pi) + +def angleDiff(a,b): + return math.pi - abs((abs(a - b) % (math.pi*2)) - math.pi) + +def getAngle2(v1,v2): + return abs(angleDiff(math.atan2(v1.y, v1.x), math.atan2(v2.y, v2.x))) + +def getMitreOffset(n,v1,v2,mitreSize=4,maxAngle=.9): + + # dont mitre if segment is too short + if abs(getMagnitude(v1)) < mitreSize * 2 or 
abs(getMagnitude(v2)) < mitreSize * 2: + return + angle = getAngle2(v2,v1) + v1 = normalizeVector(v1) + v2 = normalizeVector(v2) + if v1.x == v2.x and v1.y == v2.y: + return + + + # only mitre corners sharper than maxAngle + if angle > maxAngle: + return + + radius = mitreSize / abs(getDistance(v1,v2)) + offset1 = RPoint(round(v1.x * radius), round(v1.y * radius)) + offset2 = RPoint(round(v2.x * radius), round(v2.y * radius)) + return offset1, offset2 + +def mitreGlyph(g,mitreSize,maxAngle): + if g == None: + return + + tangents = getTangents(g.contours) + sid = -1 + for c in g.contours: + segments = [] + needsMitring = False + for s in c: + sid += 1 + v1, v2 = tangents[sid] + off = getMitreOffset(s,v1,v2,mitreSize,maxAngle) + s1 = s.copy() + if off != None: + offset1, offset2 = off + p2 = s.points[-1] + offset2 + s2 = RSegment('line', [(p2.x, p2.y)]) + s1.points[0] += offset1 + segments.append(s1) + segments.append(s2) + needsMitring = True + else: + segments.append(s1) + if needsMitring: + replaceSegments(c, segments) diff --git a/misc/pylib/fontbuild/mix.py b/misc/pylib/fontbuild/mix.py new file mode 100644 index 000000000..5e5388b3e --- /dev/null +++ b/misc/pylib/fontbuild/mix.py @@ -0,0 +1,360 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from numpy import array, append +import copy +import json +from robofab.objects.objectsRF import RPoint, RGlyph +from robofab.world import OpenFont +from decomposeGlyph import decomposeGlyph + + +class FFont: + "Font wrapper for floating point operations" + + def __init__(self,f=None): + self.glyphs = {} + self.hstems = [] + self.vstems = [] + self.kerning = {} + if isinstance(f,FFont): + #self.glyphs = [g.copy() for g in f.glyphs] + for key,g in f.glyphs.iteritems(): + self.glyphs[key] = g.copy() + self.hstems = list(f.hstems) + self.vstems = list(f.vstems) + self.kerning = dict(f.kerning) + elif f != None: + self.copyFromFont(f) + + def copyFromFont(self, f): + for g in f: + self.glyphs[g.name] = FGlyph(g) + self.hstems = [s for s in f.info.postscriptStemSnapH] + self.vstems = [s for s in f.info.postscriptStemSnapV] + self.kerning = f.kerning.asDict() + + + def copyToFont(self, f): + for g in f: + try: + gF = self.glyphs[g.name] + gF.copyToGlyph(g) + except: + print "Copy to glyph failed for" + g.name + f.info.postscriptStemSnapH = self.hstems + f.info.postscriptStemSnapV = self.vstems + for pair in self.kerning: + f.kerning[pair] = self.kerning[pair] + + def getGlyph(self, gname): + try: + return self.glyphs[gname] + except: + return None + + def setGlyph(self, gname, glyph): + self.glyphs[gname] = glyph + + def addDiff(self,b,c): + newFont = FFont(self) + for key,g in newFont.glyphs.iteritems(): + gB = b.getGlyph(key) + gC = c.getGlyph(key) + try: + newFont.glyphs[key] = g.addDiff(gB,gC) + except: + print "Add diff failed for '%s'" %key + return newFont + +class FGlyph: + "provides a temporary floating point compatible glyph data structure" + + def __init__(self, g=None): + self.contours = [] + self.width = 0. 
+ self.components = [] + self.anchors = [] + if g != None: + self.copyFromGlyph(g) + + def copyFromGlyph(self,g): + self.name = g.name + valuesX = [] + valuesY = [] + self.width = len(valuesX) + valuesX.append(g.width) + for c in g.components: + self.components.append((len(valuesX), len(valuesY))) + valuesX.append(c.scale[0]) + valuesY.append(c.scale[1]) + valuesX.append(c.offset[0]) + valuesY.append(c.offset[1]) + + for a in g.anchors: + self.anchors.append((len(valuesX), len(valuesY))) + valuesX.append(a.x) + valuesY.append(a.y) + + for i in range(len(g)): + self.contours.append([]) + for j in range (len(g[i].points)): + self.contours[i].append((len(valuesX), len(valuesY))) + valuesX.append(g[i].points[j].x) + valuesY.append(g[i].points[j].y) + + self.dataX = array(valuesX, dtype=float) + self.dataY = array(valuesY, dtype=float) + + def copyToGlyph(self,g): + g.width = self._derefX(self.width) + if len(g.components) == len(self.components): + for i in range(len(self.components)): + g.components[i].scale = (self._derefX(self.components[i][0] + 0, asInt=False), + self._derefY(self.components[i][1] + 0, asInt=False)) + g.components[i].offset = (self._derefX(self.components[i][0] + 1), + self._derefY(self.components[i][1] + 1)) + if len(g.anchors) == len(self.anchors): + for i in range(len(self.anchors)): + g.anchors[i].x = self._derefX( self.anchors[i][0]) + g.anchors[i].y = self._derefY( self.anchors[i][1]) + for i in range(len(g)) : + for j in range (len(g[i].points)): + g[i].points[j].x = self._derefX(self.contours[i][j][0]) + g[i].points[j].y = self._derefY(self.contours[i][j][1]) + + def isCompatible(self, g): + return (len(self.dataX) == len(g.dataX) and + len(self.dataY) == len(g.dataY) and + len(g.contours) == len(self.contours)) + + def __add__(self,g): + if self.isCompatible(g): + newGlyph = self.copy() + newGlyph.dataX = self.dataX + g.dataX + newGlyph.dataY = self.dataY + g.dataY + return newGlyph + else: + print "Add failed for '%s'" %(self.name) + 
raise Exception + + def __sub__(self,g): + if self.isCompatible(g): + newGlyph = self.copy() + newGlyph.dataX = self.dataX - g.dataX + newGlyph.dataY = self.dataY - g.dataY + return newGlyph + else: + print "Subtract failed for '%s'" %(self.name) + raise Exception + + def __mul__(self,scalar): + newGlyph = self.copy() + newGlyph.dataX = self.dataX * scalar + newGlyph.dataY = self.dataY * scalar + return newGlyph + + def scaleX(self,scalar): + newGlyph = self.copy() + if len(self.dataX) > 0: + newGlyph.dataX = self.dataX * scalar + for i in range(len(newGlyph.components)): + newGlyph.dataX[newGlyph.components[i][0]] = self.dataX[newGlyph.components[i][0]] + return newGlyph + + def shift(self,ammount): + newGlyph = self.copy() + newGlyph.dataX = self.dataX + ammount + for i in range(len(newGlyph.components)): + newGlyph.dataX[newGlyph.components[i][0]] = self.dataX[newGlyph.components[i][0]] + return newGlyph + + def interp(self, g, v): + gF = self.copy() + if not self.isCompatible(g): + print "Interpolate failed for '%s'; outlines incompatible" %(self.name) + raise Exception + + gF.dataX += (g.dataX - gF.dataX) * v.x + gF.dataY += (g.dataY - gF.dataY) * v.y + return gF + + def copy(self): + ng = FGlyph() + ng.contours = list(self.contours) + ng.width = self.width + ng.components = list(self.components) + ng.anchors = list(self.anchors) + ng.dataX = self.dataX.copy() + ng.dataY = self.dataY.copy() + ng.name = self.name + return ng + + def _derefX(self,id, asInt=True): + val = self.dataX[id] + return int(round(val)) if asInt else val + + def _derefY(self,id, asInt=True): + val = self.dataY[id] + return int(round(val)) if asInt else val + + def addDiff(self,gB,gC): + newGlyph = self + (gB - gC) + return newGlyph + + + +class Master: + + def __init__(self, font=None, v=0, kernlist=None, overlay=None): + if isinstance(font, FFont): + self.font = None + self.ffont = font + elif isinstance(font,str): + self.openFont(font,overlay) + elif isinstance(font,Mix): + self.font = 
font + else: + self.font = font + self.ffont = FFont(font) + if isinstance(v,float) or isinstance(v,int): + self.v = RPoint(v, v) + else: + self.v = v + if kernlist != None: + kerns = [i.strip().split() for i in open(kernlist).readlines()] + + self.kernlist = [{'left':k[0], 'right':k[1], 'value': k[2]} + for k in kerns + if not k[0].startswith("#") + and not k[0] == ""] + #TODO implement class based kerning / external kerning file + + def openFont(self, path, overlayPath=None): + self.font = OpenFont(path) + for g in self.font: + size = len(g) + csize = len(g.components) + if (size > 0 and csize > 0): + decomposeGlyph(self.font, self.font[g.name]) + + if overlayPath != None: + overlayFont = OpenFont(overlayPath) + font = self.font + for overlayGlyph in overlayFont: + font.insertGlyph(overlayGlyph) + + self.ffont = FFont(self.font) + + +class Mix: + def __init__(self,masters,v): + self.masters = masters + if isinstance(v,float) or isinstance(v,int): + self.v = RPoint(v,v) + else: + self.v = v + + def getFGlyph(self, master, gname): + if isinstance(master.font, Mix): + return font.mixGlyphs(gname) + return master.ffont.getGlyph(gname) + + def getGlyphMasters(self,gname): + masters = self.masters + if len(masters) <= 2: + return self.getFGlyph(masters[0], gname), self.getFGlyph(masters[-1], gname) + + def generateFFont(self): + ffont = FFont(self.masters[0].ffont) + for key,g in ffont.glyphs.iteritems(): + ffont.glyphs[key] = self.mixGlyphs(key) + ffont.kerning = self.mixKerns() + return ffont + + def generateFont(self, baseFont): + newFont = baseFont.copy() + #self.mixStems(newFont) todo _ fix stems code + for g in newFont: + gF = self.mixGlyphs(g.name) + if gF == None: + g.mark = True + elif isinstance(gF, RGlyph): + newFont[g.name] = gF.copy() + else: + gF.copyToGlyph(g) + + newFont.kerning.clear() + newFont.kerning.update(self.mixKerns() or {}) + return newFont + + def mixGlyphs(self,gname): + gA,gB = self.getGlyphMasters(gname) + try: + return 
gA.interp(gB,self.v) + except: + print "mixglyph failed for %s" %(gname) + if gA != None: + return gA.copy() + + def getKerning(self, master): + if isinstance(master.font, Mix): + return master.font.mixKerns() + return master.ffont.kerning + + def mixKerns(self): + masters = self.masters + kA, kB = self.getKerning(masters[0]), self.getKerning(masters[-1]) + return interpolateKerns(kA, kB, self.v) + + +def narrowFLGlyph(g, gThin, factor=.75): + gF = FGlyph(g) + if not isinstance(gThin,FGlyph): + gThin = FGlyph(gThin) + gCondensed = gThin.scaleX(factor) + try: + gNarrow = gF + (gCondensed - gThin) + gNarrow.copyToGlyph(g) + except: + print "No dice for: " + g.name + +def interpolate(a,b,v,e=0): + if e == 0: + return a+(b-a)*v + qe = (b-a)*v*v*v + a #cubic easing + le = a+(b-a)*v # linear easing + return le + (qe-le) * e + +def interpolateKerns(kA, kB, v): + # to yield correct kerning for Roboto output, we must emulate the behavior + # of old versions of this code; namely, take the kerning values of the first + # master instead of actually interpolating. 
+ # old code: + # https://github.com/google/roboto/blob/7f083ac31241cc86d019ea6227fa508b9fcf39a6/scripts/lib/fontbuild/mix.py + # bug: + # https://github.com/google/roboto/issues/213 + # return dict(kA) + + kerns = {} + for pair, val in kA.items(): + kerns[pair] = interpolate(val, kB.get(pair, 0), v.x) + for pair, val in kB.items(): + lerped_val = interpolate(val, kA.get(pair, 0), 1 - v.x) + if pair in kerns: + assert abs(kerns[pair] - lerped_val) < 1e-6 + else: + kerns[pair] = lerped_val + return kerns diff --git a/misc/restore-diacritics-kerning.py b/misc/restore-diacritics-kerning.py new file mode 100644 index 000000000..d0fe3100f --- /dev/null +++ b/misc/restore-diacritics-kerning.py @@ -0,0 +1,431 @@ +#!/usr/bin/env python +# encoding: utf8 +# +# This script was used specifically to re-introduce a bunch of kerning values +# that where lost in an old kerning cleanup that failed to account for +# automatically composed glyphs defined in diacritics.txt. +# +# Steps: +# 1. git diff 10e15297b 10e15297b^ > 10e15297b.diff +# 2. edit 10e15297b.diff and remove the python script add +# 3. fetch copies of kerning.plist and groups.plist from before the loss change +# bold-groups.plist +# bold-kerning.plist +# regular-groups.plist +# regular-kerning.plist +# 4. run this script +# +from __future__ import print_function +import os, sys, plistlib, json +from collections import OrderedDict +from ConfigParser import RawConfigParser +from argparse import ArgumentParser +from fontTools import ttLib +from robofab.objects.objectsRF import OpenFont + + +srcFontPaths = ['src/Interface-Regular.ufo', 'src/Interface-Bold.ufo'] + + +def getTTGlyphList(font): # -> { 'Omega': [2126, ...], ... 
} + if isinstance(font, str): + font = ttLib.TTFont(font) + + if not 'cmap' in font: + raise Exception('missing cmap table') + + gl = {} + bestCodeSubTable = None + bestCodeSubTableFormat = 0 + + for st in font['cmap'].tables: + if st.platformID == 0: # 0=unicode, 1=mac, 2=(reserved), 3=microsoft + if st.format > bestCodeSubTableFormat: + bestCodeSubTable = st + bestCodeSubTableFormat = st.format + + if bestCodeSubTable is not None: + for cp, glyphname in bestCodeSubTable.cmap.items(): + if glyphname in gl: + gl[glyphname].append(cp) + else: + gl[glyphname] = [cp] + + return gl, font + + +def parseAGL(filename): # -> { 2126: 'Omega', ... } + m = {} + with open(filename, 'r') as f: + for line in f: + # Omega;2126 + # dalethatafpatah;05D3 05B2 # higher-level combinations; ignored + line = line.strip() + if len(line) > 0 and line[0] != '#': + name, uc = tuple([c.strip() for c in line.split(';')]) + if uc.find(' ') == -1: + # it's a 1:1 mapping + m[int(uc, 16)] = name + return m + + +def parseGlyphComposition(composite): + c = composite.split("=") + d = c[1].split("/") + glyphName = d[0] + if len(d) == 1: + offset = [0, 0] + else: + offset = [int(i) for i in d[1].split(",")] + accentString = c[0] + accents = accentString.split("+") + baseName = accents.pop(0) + accentNames = [i.split(":") for i in accents] + return (glyphName, baseName, accentNames, offset) + + +def loadGlyphCompositions(filename): + compositions = OrderedDict() + with open(filename, 'r') as f: + for line in f: + line = line.strip() + if len(line) > 0 and line[0] != '#': + glyphName, baseName, accentNames, offset = parseGlyphComposition(line) + compositions[glyphName] = (baseName, accentNames, offset) + return compositions + + +def loadNamesFromDiff(diffFilename): + with open(diffFilename, 'r') as f: + diffLines = [s.strip() for s in f.read().splitlines() if s.startswith('+\t')] + diffLines = [s for s in diffLines if not s.startswith('') + if p != -1: + p2 = s.find('<', p+1) + if p2 != -1: + name = 
s[p+1:p2] + try: + int(name) + except: + if not name.startswith('@'): + namesInDiff.add(s[p+1:p2]) + return namesInDiff + + +def loadGroups(filename): + groups = plistlib.readPlist(filename) + nameMap = {} # { glyphName => set(groupName) } + for groupName, glyphNames in groups.iteritems(): + for glyphName in glyphNames: + nameMap.setdefault(glyphName, set()).add(groupName) + return groups, nameMap + + +def loadKerning(filename): + kerning = plistlib.readPlist(filename) + # + # @KERN_LEFT_A + # + # @KERN_RIGHT_C + # -96 + + leftIndex = {} # { glyph-name => } + rightIndex = {} # { glyph-name => [(left-hand-side-name, kernVal), ...] } + rightGroupIndex = {} # { group-name => [(left-hand-side-name, kernVal), ...] } + + for leftName, right in kerning.iteritems(): + if leftName[0] != '@': + leftIndex[leftName] = right + + for rightName, kernVal in right.iteritems(): + if rightName[0] != '@': + rightIndex.setdefault(rightName, []).append((leftName, kernVal)) + else: + rightGroupIndex.setdefault(rightName, []).append((leftName, kernVal)) + + return kerning, leftIndex, rightIndex, rightGroupIndex + + +def loadAltNamesDB(agl, fontFilename): + uc2names = {} # { 2126: ['Omega', ...], ...} + name2ucs = {} # { 'Omega': [2126, ...], ...} + + name2ucs, _ = getTTGlyphList(fontFilename) + # -> { 'Omega': [2126, ...], ... } + for name, ucs in name2ucs.iteritems(): + for uc in ucs: + uc2names.setdefault(uc, []).append(name) + + for uc, name in agl.iteritems(): + name2ucs.setdefault(name, []).append(uc) + uc2names.setdefault(uc, []).append(name) + # -> { 2126: 'Omega', ... 
} + + return uc2names, name2ucs + + +def loadLocalNamesDB(agl, diacriticComps): # { 2126: ['Omega', ...], ...} + uc2names = None + + for fontPath in srcFontPaths: + font = OpenFont(fontPath) + if uc2names is None: + uc2names = font.getCharacterMapping() # { 2126: ['Omega', ...], ...} + else: + for uc, names in font.getCharacterMapping().iteritems(): + names2 = uc2names.get(uc, []) + for name in names: + if name not in names2: + names2.append(name) + uc2names[uc] = names2 + + # agl { 2126: 'Omega', ...} -> { 'Omega': [2126, ...], ...} + aglName2Ucs = {} + for uc, name in agl.iteritems(): + aglName2Ucs.setdefault(name, []).append(uc) + + for glyphName, comp in diacriticComps.iteritems(): + for uc in aglName2Ucs.get(glyphName, []): + names = uc2names.get(uc, []) + if glyphName not in names: + names.append(glyphName) + uc2names[uc] = names + + name2ucs = {} + for uc, names in uc2names.iteritems(): + for name in names: + name2ucs.setdefault(name, set()).add(uc) + + return uc2names, name2ucs + + +def _canonicalGlyphName(name, localName2ucs, localUc2Names, altName2ucs): + ucs = localName2ucs.get(name) + if ucs: + return name, list(ucs)[0] + ucs = altName2ucs.get(name) + if ucs: + for uc in ucs: + localNames = localUc2Names.get(uc) + if localNames and len(localNames): + return localNames[0], uc + return None, None + + +def main(): + argparser = ArgumentParser(description='Restore lost kerning') + + argparser.add_argument( + '-dry', dest='dryRun', action='store_const', const=True, default=False, + help='Do not modify anything, but instead just print what would happen.') + + argparser.add_argument( + 'srcFont', metavar='', type=str, + help='TrueType, OpenType or UFO fonts to gather glyph info from') + + argparser.add_argument( + 'diffFile', metavar='', type=str, help='Diff file') + + args = argparser.parse_args() + + dryRun = args.dryRun + + agl = parseAGL('src/glyphlist.txt') + diacriticComps = loadGlyphCompositions('src/diacritics.txt') + + altUc2names, altName2ucs = 
loadAltNamesDB(agl, args.srcFont) + localUc2Names, localName2ucs = loadLocalNamesDB(agl, diacriticComps) + + canonicalGlyphName = lambda name: _canonicalGlyphName( + name, localName2ucs, localUc2Names, altName2ucs) + + deletedNames = loadNamesFromDiff(args.diffFile) # 10e15297b.diff + deletedDiacriticNames = OrderedDict() + + for glyphName, comp in diacriticComps.iteritems(): + if glyphName in deletedNames: + deletedDiacriticNames[glyphName] = comp + + + for fontPath in srcFontPaths: + addedGroupNames = set() + + oldFilenamePrefix = 'regular' + if fontPath.find('Bold') != -1: + oldFilenamePrefix = 'bold' + oldGroups, oldNameToGroups = loadGroups( + oldFilenamePrefix + '-groups.plist') + oldKerning, oldLIndex, oldRIndex, oldRGroupIndex = loadKerning( + oldFilenamePrefix + '-kerning.plist') + # lIndex : { name => } + # rIndex : { name => [(left-hand-side-name, kernVal), ...] } + + currGroupFilename = os.path.join(fontPath, 'groups.plist') + currKerningFilename = os.path.join(fontPath, 'kerning.plist') + currGroups, currNameToGroups = loadGroups(currGroupFilename) + currKerning, currLIndex, currRIndex, currRGroupIndex = loadKerning(currKerningFilename) + + for glyphName, comp in deletedDiacriticNames.iteritems(): + oldGroupMemberships = oldNameToGroups.get(glyphName) + localGlyphName, localUc = canonicalGlyphName(glyphName) + + # if glyphName != 'dcaron': + # continue # XXX DEBUG + + if localGlyphName is None: + # glyph does no longer exist -- ignore + print('[IGNORE]', glyphName) + continue + + if oldGroupMemberships: + # print('group', localGlyphName, + # '=>', localUc, + # 'in old group:', oldGroupMemberships, ', curr group:', currGroupMemberships) + for oldGroupName in oldGroupMemberships: + currGroup = currGroups.get(oldGroupName) # None|[glyphname, ...] 
+ # print('GM ', localGlyphName, oldGroupName, len(currGroup) if currGroup else 0) + if currGroup is not None: + if localGlyphName not in currGroup: + # print('[UPDATE group]', oldGroupName, 'append', localGlyphName) + currGroup.append(localGlyphName) + else: + # group does not currently exist + if currNameToGroups.get(localGlyphName): + raise Exception('TODO: case where glyph is in some current groups, but not the' + + 'original-named group') + print('[ADD group]', oldGroupName, '=> [', localGlyphName, ']') + currGroups[oldGroupName] = [localGlyphName] + addedGroupNames.add(oldGroupName) + # if oldGroupName in oldKerning: + # print('TODO: effects of oldGroupName being in oldKerning:', + # oldKerning[oldGroupName]) + if oldGroupName in oldRGroupIndex: + print('TODO: effects of oldGroupName being in oldRGroupIndex:', + oldRGroupIndex[oldGroupName]) + + else: # if not oldGroupMemberships + ucs = localName2ucs.get(glyphName) + if not ucs: + raise Exception( + 'TODO non-group, non-local name ' + glyphName + ' -- lookup in alt names') + + asLeft = oldLIndex.get(glyphName) + atRightOf = oldRIndex.get(glyphName) + + # print('individual', glyphName, + # '=>', ', '.join([str(uc) for uc in ucs]), + # '\n as left:', asLeft is not None, + # '\n at right of:', atRightOf is not None) + + if asLeft: + currKern = currKerning.get(localGlyphName) + if currKern is None: + rightValues = {} + for rightName, kernValue in asLeft.iteritems(): + if rightName[0] == '@': + currGroup = currGroups.get(rightName) + if currGroup and localGlyphName not in currGroup: + rightValues[rightName] = kernValue + else: + localName, localUc = canonicalGlyphName(rightName) + if localName: + rightValues[localName] = kernValue + if len(rightValues) > 0: + print('[ADD currKerning]', localGlyphName, '=>', rightValues) + currKerning[localGlyphName] = rightValues + + if atRightOf: + for parentLeftName, kernVal in atRightOf: + # print('atRightOf:', parentLeftName, kernVal) + if parentLeftName[0] == '@': + if 
parentLeftName in currGroups: + k = currKerning.get(parentLeftName) + if k: + if localGlyphName not in k: + print('[UPDATE currKerning g]', + parentLeftName, '+= {', localGlyphName, ':', kernVal, '}') + k[localGlyphName] = kernVal + else: + print('TODO: left-group is NOT in currKerning; left-group', parentLeftName) + else: + localParentLeftGlyphName, _ = canonicalGlyphName(parentLeftName) + if localParentLeftGlyphName: + k = currKerning.get(localParentLeftGlyphName) + if k: + if localGlyphName not in k: + print('[UPDATE currKerning i]', + localParentLeftGlyphName, '+= {', localGlyphName, ':', kernVal, '}') + k[localGlyphName] = kernVal + else: + print('[ADD currKerning i]', + localParentLeftGlyphName, '=> {', localGlyphName, ':', kernVal, '}') + currKerning[localParentLeftGlyphName] = {localGlyphName: kernVal} + + + for groupName in addedGroupNames: + print('————————————————————————————————————————————') + print('re-introduce group', groupName, 'to kerning') + + oldRKern = oldKerning.get(groupName) + if oldRKern is not None: + newRKern = {} + for oldRightName, kernVal in oldRKern.iteritems(): + if oldRightName[0] == '@': + if oldRightName in currGroups: + newRKern[oldRightName] = kernVal + else: + # Note: (oldRightName in addedGroupNames) should always be False here + # as we would have added it to currGroups already. + print('[DROP group]', oldRightName, kernVal) + if oldRightName in currGroups: + del currGroups[oldRightName] + else: + localGlyphName, _ = canonicalGlyphName(oldRightName) + if localGlyphName: + newRKern[localGlyphName] = kernVal + print('localGlyphName', localGlyphName) + + if len(newRKern): + print('[ADD currKerning g]', groupName, newRKern) + currKerning[groupName] = newRKern + + # oldRGroupIndex : { group-name => [(left-hand-side-name, kernVal), ...] 
} + oldLKern = oldRGroupIndex.get(groupName) + if oldLKern: + for oldRightName, kernVal in oldLKern: + if oldRightName[0] == '@': + if oldRightName in currGroups: + k = currKerning.get(oldRightName) + if k is not None: + print('[UPDATE kerning g]', oldRightName, '+= {', groupName, ':', kernVal, '}') + k[groupName] = kernVal + else: + currKerning[oldRightName] = {groupName: kernVal} + print('[ADD kerning g]', oldRightName, '= {', groupName, ':', kernVal, '}') + else: + localGlyphName, _ = canonicalGlyphName(oldRightName) + if localGlyphName: + k = currKerning.get(localGlyphName) + if k is not None: + print('[UPDATE kerning i]', localGlyphName, '+= {', groupName, ':', kernVal, '}') + k[groupName] = kernVal + else: + currKerning[localGlyphName] = {groupName: kernVal} + print('[ADD kerning i]', localGlyphName, '= {', groupName, ':', kernVal, '}') + + + print('Write', currGroupFilename) + if not dryRun: + plistlib.writePlist(currGroups, currGroupFilename) + + print('Write', currKerningFilename) + if not dryRun: + plistlib.writePlist(currKerning, currKerningFilename) + + # end: for fontPath + +main() diff --git a/misc/rewrite-glyphorder.py b/misc/rewrite-glyphorder.py new file mode 100755 index 000000000..3da0c1699 --- /dev/null +++ b/misc/rewrite-glyphorder.py @@ -0,0 +1,305 @@ +#!/usr/bin/env python +# encoding: utf8 +from __future__ import print_function +import os, sys, plistlib, json, re +from collections import OrderedDict +from argparse import ArgumentParser +from ConfigParser import RawConfigParser +from fontTools import ttLib +from robofab.objects.objectsRF import OpenFont + + +# Regex matching "default" glyph names, like "uni2043" and "u01C5" +uniNameRe = re.compile(r'^u(?:ni)([0-9A-F]{4,8})$') + + +class PList: + def __init__(self, filename): + self.filename = filename + self.plist = None + + def load(self): + self.plist = plistlib.readPlist(self.filename) + + def save(self): + if self.plist is not None: + plistlib.writePlist(self.plist, self.filename) + + def 
get(self, k, defaultValue=None): + if self.plist is None: + self.load() + return self.plist.get(k, defaultValue) + + def __getitem__(self, k): + if self.plist is None: + self.load() + return self.plist[k] + + def __setitem__(self, k, v): + if self.plist is None: + self.load() + self.plist[k] = v + + def __delitem__(self, k): + if self.plist is None: + self.load() + del self.plist[k] + + +def parseAGL(filename): # -> { 2126: 'Omega', ... } + m = {} + with open(filename, 'r') as f: + for line in f: + # Omega;2126 + # dalethatafpatah;05D3 05B2 # higher-level combinations; ignored + line = line.strip() + if len(line) > 0 and line[0] != '#': + name, uc = tuple([c.strip() for c in line.split(';')]) + if uc.find(' ') == -1: + # it's a 1:1 mapping + m[int(uc, 16)] = name + return m + + +def revCharMap(ucToNames): + # {2126:['Omega','Omegagr']} -> {'Omega':2126, 'Omegagr':2126} + # {2126:'Omega'} -> {'Omega':2126} + m = {} + if len(ucToNames) == 0: + return m + + lists = True + for v in ucToNames.itervalues(): + lists = not isinstance(v, str) + break + + if lists: + for uc, names in ucToNames.iteritems(): + for name in names: + m[name] = uc + else: + for uc, name in ucToNames.iteritems(): + m[name] = uc + + return m + + +def loadJSONGlyphOrder(jsonFilename): + gol = None + if jsonFilename == '-': + gol = json.load(sys.stdin) + else: + with open(jsonFilename, 'r') as f: + gol = json.load(f) + if not isinstance(gol, list): + raise Exception('expected [[string, int|null]') + if len(gol) > 0: + for v in gol: + if not isinstance(v, list): + raise Exception('expected [[string, int|null]]') + break + return gol + + +def loadTTGlyphOrder(font): + if isinstance(font, str): + font = ttLib.TTFont(font) + + if not 'cmap' in font: + raise Exception('missing cmap table') + + bestCodeSubTable = None + bestCodeSubTableFormat = 0 + + for st in font['cmap'].tables: + if st.platformID == 0: # 0=unicode, 1=mac, 2=(reserved), 3=microsoft + if st.format > bestCodeSubTableFormat: + 
bestCodeSubTable = st + bestCodeSubTableFormat = st.format + + ucmap = {} + if bestCodeSubTable is not None: + for cp, glyphname in bestCodeSubTable.cmap.items(): + ucmap[glyphname] = cp + + gol = [] + for name in font.getGlyphOrder(): + gol.append((name, ucmap.get(name))) + + return gol + + +def loadSrcGlyphOrder(jsonFilename, fontFilename): # -> [ ('Omegagreek', 2126|None), ...] + if jsonFilename: + return loadJSONGlyphOrder(jsonFilename) + elif fontFilename: + return loadTTGlyphOrder(fontFilename.rstrip('/ ')) + return None + + +def loadUFOGlyphNames(ufoPath): + font = OpenFont(ufoPath) + + libPlist = PList(os.path.join(ufoPath, 'lib.plist')) + orderedNames = libPlist['public.glyphOrder'] # [ 'Omega', ...] + + # append any glyphs that are missing in orderedNames + allNames = set(font.keys()) + for name in orderedNames: + allNames.discard(name) + for name in allNames: + orderedNames.append(name) + + ucToNames = font.getCharacterMapping() # { 2126: [ 'Omega', ...], ...} + nameToUc = revCharMap(ucToNames) # { 'Omega': 2126, ...} + + gol = OrderedDict() # OrderedDict{ ('Omega', 2126|None), ...} + for name in orderedNames: + gol[name] = nameToUc.get(name) + # gol.append((name, nameToUc.get(name))) + + return gol, ucToNames, nameToUc, libPlist + + +def saveUFOGlyphOrder(libPlist, orderedNames, dryRun): + libPlist['public.glyphOrder'] = orderedNames + + roboSort = libPlist.get('com.typemytype.robofont.sort') + if roboSort is not None: + # lib['com.typemytype.robofont.sort'] has schema + # [ { type: "glyphList", ascending: [glyphname, ...] }, ...] 
+ for i in range(len(roboSort)): + ent = roboSort[i] + if isinstance(ent, dict) and ent.get('type') == 'glyphList': + roboSort[i] = {'type':'glyphList', 'ascending':orderedNames} + break + + print('Writing', libPlist.filename) + if not dryRun: + libPlist.save() + + +def getConfigResFile(config, basedir, name): + fn = os.path.join(basedir, config.get("res", name)) + if not os.path.isfile(fn): + basedir = os.path.dirname(basedir) + fn = os.path.join(basedir, config.get("res", name)) + if not os.path.isfile(fn): + fn = None + return fn + + +def main(): + argparser = ArgumentParser(description='Rewrite glyph order of UFO fonts') + + argparser.add_argument( + '-dry', dest='dryRun', action='store_const', const=True, default=False, + help='Do not modify anything, but instead just print what would happen.') + + argparser.add_argument( + '-src-json', dest='srcJSONFile', metavar='', type=str, + help='JSON file to read glyph order from.' + + ' Should be a list e.g. [["Omega", 2126], [".notdef", null], ...]') + + argparser.add_argument( + '-src-font', dest='srcFontFile', metavar='', type=str, + help='TrueType or OpenType font to read glyph order from.') + + argparser.add_argument( + '-out', dest='outFile', metavar='', type=str, + help='Write each name per line to ') + + argparser.add_argument( + 'dstFontsPaths', metavar='', type=str, nargs='+', help='UFO fonts to update') + + args = argparser.parse_args() + dryRun = args.dryRun + + if args.srcJSONFile and args.srcFontFile: + argparser.error('Both -src-json and -src-font specified -- please provide only one.') + + srcGol = loadSrcGlyphOrder(args.srcJSONFile, args.srcFontFile) + if srcGol is None: + argparser.error('No source provided (-src-* argument missing)') + + # Load Adobe Glyph List database + srcDir = os.path.dirname(args.dstFontsPaths[0]) + config = RawConfigParser(dict_type=OrderedDict) + config.read(os.path.join(srcDir, 'fontbuild.cfg')) + aglUcToName = parseAGL(getConfigResFile(config, srcDir, 'agl_glyphlistfile')) + 
aglNameToUc = revCharMap(aglUcToName) + + glyphorderUnion = OrderedDict() + + for dstFontPath in args.dstFontsPaths: + glyphOrder, ucToNames, nameToUc, libPlist = loadUFOGlyphNames(dstFontPath) + + newGol = OrderedDict() + for name, uc in srcGol: + + if uc is None: + # if there's no unicode associated, derive from name if possible + m = uniNameRe.match(name) + if m: + try: + uc = int(m.group(1), 16) + except: + pass + if uc is None: + uc = aglNameToUc.get(name) + + # has same glyph mapped to same unicode + names = ucToNames.get(uc) + if names is not None: + for name in names: + # print('U %s U+%04X' % (name, uc)) + newGol[name] = uc + continue + + # has same name in dst? + uc2 = glyphOrder.get(name) + if uc2 is not None: + # print('N %s U+%04X' % (name, uc2)) + newGol[name] = uc2 + continue + + # Try AGL[uc] -> name == name + if uc is not None: + name2 = aglUcToName.get(uc) + if name2 is not None: + uc2 = glyphOrder.get(name2) + if uc2 is not None: + # print('A %s U+%04X' % (name2, uc2)) + newGol[name2] = uc2 + continue + + # else: ignore glyph name in srcGol not found in target + # if uc is None: + # print('x %s -' % name) + # else: + # print('x %s U+%04X' % (name, uc)) + + + # add remaining glyphs from original glyph order + for name, uc in glyphOrder.iteritems(): + if name not in newGol: + # print('E %s U+%04X' % (name, uc)) + newGol[name] = uc + + orderedNames = [] + for name in newGol.iterkeys(): + orderedNames.append(name) + glyphorderUnion[name] = True + + saveUFOGlyphOrder(libPlist, orderedNames, dryRun) + + if args.outFile: + print('Write', args.outFile) + glyphorderUnionNames = glyphorderUnion.keys() + if not dryRun: + with open(args.outFile, 'w') as f: + f.write('\n'.join(glyphorderUnionNames) + '\n') + + +if __name__ == '__main__': + main() diff --git a/misc/rf-scripts/AdjustWidth.py b/misc/rf-scripts/AdjustWidth.py new file mode 100644 index 000000000..c3d381f68 --- /dev/null +++ b/misc/rf-scripts/AdjustWidth.py @@ -0,0 +1,53 @@ +# +# This script 
changes the width of all glyphs by applying a multiplier. +# It keeps the contours centered as glyphs get wider or tighter. +# +from mojo.roboFont import version +from math import ceil, floor + +if __name__ == "__main__": + font = CurrentFont() + print "Resizing glyph margins for %r" % font + + # how much to add or remove from each glyph's margin + A = -16 + + if font is not None: + for g in font: + # skip glyphs + if g.name in ('c', 'e', 'o', 'r', 'j'): + continue + + if g.width < 2: + print '"%s": ["ignore", "zero-width"],' % (g.name) + continue + + if g.box is None: + print '"%s": ["ignore", "empty"],' % (g.name) + continue + + if g.width % 16 != 0: + print '"%s": ["ignore", "misaligned"],' % (g.name) + continue + + if g.leftMargin <= 0 or g.rightMargin <= 0: + print '"%s": ["ignore", "zero-or-negative"],' % (g.name) + continue + + leftMargin = int(max(0, g.leftMargin + A)) + rightMargin = int(max(0, g.rightMargin + A)) + + #print '"%s": ["update", %g, %g],' % (g.name, leftMargin, rightMargin) + if 'interface.spaceadjust' in g.lib: + g.lib['interface.width-adjustments'].append(A) + else: + g.lib['interface.width-adjustments'] = [A] + # order of assignment is probably important + g.rightMargin = int(rightMargin) + g.leftMargin = int(leftMargin) + + font.update() + else: + print "No fonts open" + + print "Done" diff --git a/misc/rf-scripts/ChangeUPM.py b/misc/rf-scripts/ChangeUPM.py new file mode 100644 index 000000000..f7617353a --- /dev/null +++ b/misc/rf-scripts/ChangeUPM.py @@ -0,0 +1,107 @@ +# Change upm +# Jens Kutilek 2013-01-02 + +from mojo.roboFont import version + +def scalePoints(glyph, factor): + if version == "1.4": + # stupid workaround for bug in RoboFont 1.4 + for contour in glyph: + for point in contour.points: + point.x *= factor + point.y *= factor + glyph.width *= factor + else: + glyph *= factor + +def scaleGlyph(glyph, factor, scaleWidth=True, roundCoordinates=True): + if not(scaleWidth): + oldWidth = glyph.width + if len(glyph.components) == 
0: + scalePoints(glyph, factor) + if roundCoordinates: + glyph.round() + else: + # save components + # this may be a tad too convoluted ... + components = [] + for i in range(len(glyph.components)): + components.append(glyph.components[i]) + for c in components: + glyph.removeComponent(c) + scalePoints(glyph, factor) + if roundCoordinates: + glyph.round() + # restore components + for i in range(len(components)): + newOffset = (int(round(components[i].offset[0] * factor)), + int(round(components[i].offset[1] * factor))) + glyph.appendComponent(components[i].baseGlyph, newOffset, components[i].scale) + if not(scaleWidth): + # restore width + glyph.width = oldWidth + + +def changeUPM(font, factor, roundCoordinates=True): + + # Glyphs + for g in font: + scaleGlyph(g, factor) + for guide in g.guides: + # another thing that doesn't work in RoboFont 1.4 - 1.5.1 + guide.x *= factor + guide.y *= factor + + # Glyph layers + mainLayer = "foreground" + for layerName in font.layerOrder: + if layerName != mainLayer: + for g in font: + g.flipLayers(mainLayer, layerName) + scaleGlyph(g, factor, scaleWidth=False) + g.flipLayers(layerName, mainLayer) + + # Kerning + if font.kerning: + font.kerning.scale(factor) + if roundCoordinates: + if not version in ["1.4", "1.5", "1.5.1"]: + font.kerning.round(1) + else: + print "WARNING: kerning values cannot be rounded to integer in this RoboFont version" + + # TODO: Change positioning feature code? 
+ + # Vertical dimensions + font.info.descender = int(round(font.info.descender * factor)) + font.info.xHeight = int(round(font.info.xHeight * factor)) + font.info.capHeight = int(round(font.info.capHeight * factor)) + font.info.ascender = int(round(font.info.ascender * factor)) + + # Finally set new UPM + font.info.unitsPerEm = newUpm + + font.update() + +if __name__ == "__main__": + from robofab.interface.all.dialogs import AskString + + print "Change Units Per Em" + + if CurrentFont() is not None: + oldUpm = CurrentFont().info.unitsPerEm + newUpm = CurrentFont().info.unitsPerEm + try: + newUpm = int(AskString("New units per em size?", oldUpm)) + except: + pass + if newUpm == oldUpm: + print " Not changing upm size." + else: + factor = float(newUpm) / oldUpm + print " Scaling all font measurements by", factor + changeUPM(CurrentFont(), factor) + else: + print " Open a font first to change upm, please." + + print " Done." diff --git a/misc/rf-scripts/GridAdjust.py b/misc/rf-scripts/GridAdjust.py new file mode 100644 index 000000000..f14550b4a --- /dev/null +++ b/misc/rf-scripts/GridAdjust.py @@ -0,0 +1,83 @@ +# +# This script changes the width of any glyph which width is not an even multiple of 256. +# For glyphs that are updated, the shape(s) inside the glyph are centered as well. +# +from mojo.roboFont import version +from math import ceil, floor + +if __name__ == "__main__": + font = CurrentFont() + print "Fitting glyphs to EM grid at 256 %r" % font + + # Strategy to use for centering a glyph when resizing its EM: + # "center" Ignore existing margins and center in EM at on integer units. + # "adjust-margins" Attempt to retain existing margins w/o centering inside EM. 
+ centeringStrategy = 'center' + + if font is not None: + for g in font: + # only consider adjusting the listed glyphs + # if g.unicode not in (0x212B, 0x005A, 0x0387): + # continue + + if g.width < 2: + # ignore zero width glyph + # print 'ignoring %r -- zero width' % g + continue + + if g.width % 256 == 0: + # ignore already aligned glyph + # print 'ignoring %r -- already aligned' % g + continue + + width = g.width + if g.rightMargin < 128: + width = ceil(width / 256) * 256 + else: + width = round(width / 256) * 256 + + # center glyph in EM + leftMargin = g.leftMargin + rightMargin = g.rightMargin + + if centeringStrategy == 'adjust-margins': + # Adjust margins to place the glyph in the center while retaining original + # left/right margins. + widthDelta = width - g.width + leftMargin = g.leftMargin + int(floor(widthDelta / 2)) + rightMargin = g.rightMargin + int(ceil(widthDelta / 2)) + elif centeringStrategy == 'center': + # Read g.box (effective bounds of the glyph) and truly center the + # glyph, but we could run the risk of losing some intentionally-left or right + # aligned glyph, e.g. 
"|x |" -> "| x |" + if g.box is not None: + xMin, yMin, xMax, yMax = g.box + graphicWidth = xMax - xMin + leftMargin = round((width - graphicWidth) / 2) + else: + print 'Unexpected centeringStrategy value' + break + + # log message + uniname = '' + if g.unicode is not None: + uniname = ' U+%04X' % g.unicode + print 'Adjusting "%s"%s from %g to %g' % (g.name, uniname, g.width, width) + + # write changes to glyph + g.lib['interface.gridadjust.original'] = repr({ + "rightMargin": g.rightMargin, + "leftMargin": g.leftMargin, + "width": g.width, + }) + + # order of assignment is probably important + g.rightMargin = int(rightMargin) + g.leftMargin = int(leftMargin) + g.width = int(width) + + font.update() + else: + print "No fonts open" + + print "Done" diff --git a/misc/rf-scripts/RemoveLocalGuides.py b/misc/rf-scripts/RemoveLocalGuides.py new file mode 100644 index 000000000..05e1a05b7 --- /dev/null +++ b/misc/rf-scripts/RemoveLocalGuides.py @@ -0,0 +1,15 @@ +# +# Removes local guides from all glyphs +# +if __name__ == "__main__": + font = CurrentFont() + print "Removing local guides from all glyphs of %r" % font + if font is not None: + for g in font: + if 'com.typemytype.robofont.guides' in g.lib: + del(g.lib['com.typemytype.robofont.guides']) + font.update() + else: + print "No fonts open" + + print "Done" diff --git a/misc/rf-scripts/StripGlyphs.py b/misc/rf-scripts/StripGlyphs.py new file mode 100644 index 000000000..12bc2ab88 --- /dev/null +++ b/misc/rf-scripts/StripGlyphs.py @@ -0,0 +1,384 @@ +# +# Removes unused glyphs +# +from mojo.roboFont import version + +SC_ROMAN = [ + "A.smcp", + "B.smcp", + "C.smcp", + "D.smcp", + "E.smcp", + "F.smcp", + "G.smcp", + "H.smcp", + "I.smcp", + "J.smcp", + "K.smcp", + "L.smcp", + "M.smcp", + "N.smcp", + "O.smcp", + "P.smcp", + "Q.smcp", + "R.smcp", + "S.smcp", + "T.smcp", + "U.smcp", + "V.smcp", + "W.smcp", + "X.smcp", + "Y.smcp", + "Z.smcp", + "AE.smcp", + "AEacute.smcp", + "Aacute.smcp", + "Abreve.smcp", + 
"Acircumflex.smcp", + "Adieresis.smcp", + "Agrave.smcp", + "Alpha.smcp", + "Alphatonos.smcp", + "Amacron.smcp", + "Aogonek.smcp", + "Aogonek.smcp.NAV", + "Aring.smcp", + "Aringacute.smcp", + "Atilde.smcp", + "Beta.smcp", + "Cacute.smcp", + "Ccaron.smcp", + "Ccedilla.smcp", + "Ccircumflex.smcp", + "Chi.smcp", + "Dcaron.smcp", + "Dcroat.smcp", + "Delta.smcp", + "Eacute.smcp", + "Ebreve.smcp", + "Ecaron.smcp", + "Ecircumflex.smcp", + "Edieresis.smcp", + "Edotaccent.smcp", + "Egrave.smcp", + "Emacron.smcp", + "Eng.smcp", + "Eogonek.smcp", + "Eogonek.smcp.NAV", + "Epsilon.smcp", + "Epsilontonos.smcp", + "Eta.smcp", + "Etatonos.smcp", + "Eth.smcp", + "Gamma.smcp", + "Gbreve.smcp", + "Gcircumflex.smcp", + "Gcommaaccent.smcp", + "Germandbls.smcp", + "Hbar.smcp", + "Hcircumflex.smcp", + "IJ.smcp", + "Iacute.smcp", + "Ibreve.smcp", + "Icircumflex.smcp", + "Idieresis.smcp", + "Igrave.smcp", + "Imacron.smcp", + "Iogonek.smcp", + "Iota.smcp", + "Iotadieresis.smcp", + "Iotatonos.smcp", + "Itilde.smcp", + "Jcircumflex.smcp", + "Kappa.smcp", + "Kcommaaccent.smcp", + "Lacute.smcp", + "Lambda.smcp", + "Lcaron.smcp", + "Lcommaaccent.smcp", + "Ldot.smcp", + "Lslash.smcp", + "Nacute.smcp", + "Ncaron.smcp", + "Ncommaaccent.smcp", + "Ntilde.smcp", + "Nu.smcp", + "OE.smcp", + "Oacute.smcp", + "Obreve.smcp", + "Ocircumflex.smcp", + "Odieresis.smcp", + "Ograve.smcp", + "Ohungarumlaut.smcp", + "Omacron.smcp", + "Omega.smcp", + "Omegatonos.smcp", + "Omicron.smcp", + "Omicrontonos.smcp", + "Oogonek.smcp", + "Oogonek.smcp.NAV", + "Oslash.smcp", + "Oslashacute.smcp", + "Otilde.smcp", + "Phi.smcp", + "Pi.smcp", + "Psi.smcp", + "Racute.smcp", + "Rcaron.smcp", + "Rcommaaccent.smcp", + "Rho.smcp", + "Sacute.smcp", + "Scaron.smcp", + "Scedilla.smcp", + "Scircumflex.smcp", + "Sigma.smcp", + "Tau.smcp", + "Tbar.smcp", + "Tcaron.smcp", + "Theta.smcp", + "Thorn.smcp", + "Uacute.smcp", + "Ubreve.smcp", + "Ucircumflex.smcp", + "Udieresis.smcp", + "Ugrave.smcp", + "Uhungarumlaut.smcp", + "Umacron.smcp", + 
"Uogonek.smcp", + "Upsilon.smcp", + "Upsilondieresis.smcp", + "Upsilontonos.smcp", + "Uring.smcp", + "Utilde.smcp", + "Wacute.smcp", + "Wcircumflex.smcp", + "Wdieresis.smcp", + "Wgrave.smcp", + "Xi.smcp", + "Yacute.smcp", + "Ycircumflex.smcp", + "Ydieresis.smcp", + "Ygrave.smcp", + "Zacute.smcp", + "Zcaron.smcp", + "Zdotaccent.smcp", + "Zeta.smcp", + "ampersand.smcp", + "uni010A.smcp", + "uni0120.smcp", + "uni0162.smcp", + "Scommaaccent.smcp", + "Tcommaaccent.smcp", + "uni037F.smcp" +] + + +SC_SET1 = [ + "zero.smcp", + "one.smcp", + "two.smcp", + "three.smcp", + "four.smcp", + "five.smcp", + "six.smcp", + "seven.smcp", + "eight.smcp", + "nine.smcp", + "Euro.smcp", + "Idotaccent.smcp", + "Mu.smcp", + "dollar.smcp", + "lira.smcp", + "sterling.smcp", + "uni0401.smcp", + "uni0402.smcp", + "uni0403.smcp", + "uni0404.smcp", + "uni0405.smcp", + "uni0406.smcp", + "uni0407.smcp", + "uni0408.smcp", + "uni0409.smcp", + "uni040A.smcp", + "uni040B.smcp", + "uni040C.smcp", + "uni040E.smcp", + "uni040F.smcp", + "uni0410.smcp", + "uni0411.smcp", + "uni0412.smcp", + "uni0413.smcp", + "uni0414.smcp", + "uni0415.smcp", + "uni0416.smcp", + "uni0417.smcp", + "uni0418.smcp", + "uni0419.smcp", + "uni041A.smcp", + "uni041B.smcp", + "uni041C.smcp", + "uni041D.smcp", + "uni041E.smcp", + "uni041F.smcp", + "uni0420.smcp", + "uni0421.smcp", + "uni0422.smcp", + "uni0423.smcp", + "uni0424.smcp", + "uni0425.smcp", + "uni0426.smcp", + "uni0427.smcp", + "uni0428.smcp", + "uni0429.smcp", + "uni042A.smcp", + "uni042B.smcp", + "uni042C.smcp", + "uni042D.smcp", + "uni042E.smcp", + "uni042F.smcp", + "uni0490.smcp", + "uni0492.smcp", + "uni0496.smcp", + "uni0498.smcp", + "uni049A.smcp", + "uni049C.smcp", + "uni04A0.smcp", + "uni04A2.smcp", + "uni04A8.smcp", + "uni04AA.smcp", + "uni04AE.smcp", + "uni04B0.smcp", + "uni04B2.smcp", + "uni04B4.smcp", + "uni04B8.smcp", + "uni04BA.smcp", + "uni04BC.smcp", + "uni04BE.smcp", + "uni04D8.smcp", + "uni04E0.smcp", + "uni04E2.smcp", + "uni04E8.smcp", + "uni04EE.smcp", 
+ "uni20B4.smcp", + "uni20B8.smcp", + "uni20BD.smcp", + "uni2116.smcp", + "yen.smcp" +] + + +SC_SET2 = [ + "I.smcp", + "Sigma.smcp", + "Mu.smcp", + "uni0410.smcp", + "uni0411.smcp", + "uni0412.smcp", + "uni0413.smcp", + "uni0414.smcp", + "uni0415.smcp", + "uni0416.smcp", + "uni0417.smcp", + "uni0418.smcp", + "uni0419.smcp", + "uni041A.smcp", + "uni041B.smcp", + "uni041C.smcp", + "uni041D.smcp", + "uni041E.smcp", + "uni041F.smcp", + "uni0420.smcp", + "uni0421.smcp", + "uni0422.smcp", + "uni0423.smcp", + "uni0424.smcp", + "uni0425.smcp", + "uni0426.smcp", + "uni0427.smcp", + "uni0428.smcp", + "uni0429.smcp", + "uni042A.smcp", + "uni042B.smcp", + "uni042C.smcp", + "uni042D.smcp", + "uni042E.smcp", + "uni042F.smcp", + "uni0401.smcp", + "uni0402.smcp", + "uni0403.smcp", + "uni0404.smcp", + "uni0405.smcp", + "uni0406.smcp", + "uni0407.smcp", + "uni0408.smcp", + "uni0409.smcp", + "uni040A.smcp", + "uni040B.smcp", + "uni040C.smcp", + "uni040E.smcp", + "uni040F.smcp", + "uni0490.smcp", + "uni0492.smcp", + "uni0496.smcp", + "uni0498.smcp", + "uni049A.smcp", + "uni049C.smcp", + "uni04A0.smcp", + "uni04A2.smcp", + "uni04A8.smcp", + "uni04AA.smcp", + "uni04AE.smcp", + "uni04B0.smcp", + "uni04B2.smcp", + "uni04B4.smcp", + "uni04B8.smcp", + "uni04BA.smcp", + "uni04BC.smcp", + "uni04BE.smcp", + "uni04D8.smcp", + "uni04E0.smcp", + "uni04E2.smcp", + "uni04E8.smcp", + "uni04EE.smcp" +] + + +STRIP_NAME_SET = set(SC_ROMAN).union(SC_SET1).union(SC_SET2) + +STRIP_SUFFIXES = ( + '.smcp', + '.unic', + '.alt', + '.alt2', + '.ss06', + '.ss07', + '.onum', + '.pnum', + '.tnum' +) + +def hasStripSuffix(g): + name = g.name + for suffix in STRIP_SUFFIXES: + if str.endswith(name, suffix): + return True + return False + +if __name__ == "__main__": + font = CurrentFont() + if font is not None: + for g in font: + if g.name in STRIP_NAME_SET or hasStripSuffix(g): + + if g.unicode is not None: + # glyph maps to a codepoint -- keep it + continue + + print 'Removing "%s"' % g.name + + 
font.removeGlyph(g.name) + font.update() + else: + print "No fonts open" + + print "Done" diff --git a/misc/rf-scripts/ZeroWidth.py b/misc/rf-scripts/ZeroWidth.py new file mode 100644 index 000000000..a9277d09c --- /dev/null +++ b/misc/rf-scripts/ZeroWidth.py @@ -0,0 +1,26 @@ +# +# This script changes the width of all glyphs by applying a multiplier. +# It keeps the contours centered as glyphs get wider or tighter. +# +from mojo.roboFont import version +from math import ceil, floor + +if __name__ == "__main__": + font = CurrentFont() + print "Resizing glyph margins for %r" % font + + if font is not None: + for g in font: + leftMargin = g.leftMargin + rightMargin = g.rightMargin + + if leftMargin < 0 or rightMargin < 0: + g.rightMargin = int(max(0, rightMargin)) + g.leftMargin = int(max(0, leftMargin)) + print("adjust %s" % g.name) + + font.update() + else: + print "No fonts open" + + print "Done" diff --git a/misc/stems.txt b/misc/stems.txt new file mode 100644 index 000000000..0cedc8909 --- /dev/null +++ b/misc/stems.txt @@ -0,0 +1,25 @@ + +================================================================================================ +Regular +•••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• + +Horizontal: + 220 A B D E F G H L P R T Z two three(center) four five seven + 200 a e f t z minus + +Vertical: + 248 B D E F G H I J K L N P R T U Y one four + 236 a b d f g h i j k l m n p q r t u + 232 M + + + +================================================================================================ +Bold +•••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• + +Horizontal: + 380 ? + +Vertical: + 464 ? diff --git a/misc/svgsync.py b/misc/svgsync.py new file mode 100755 index 000000000..84e425194 --- /dev/null +++ b/misc/svgsync.py @@ -0,0 +1,435 @@ +#!/usr/bin/env python +# encoding: utf8 +# +# Sync glyph shapes between SVG and UFO, creating a bridge between UFO and Figma. 
+# +import os +import sys +import argparse +import re +from xml.dom.minidom import parseString as xmlparseString + +# from robofab.world import world, RFont, RGlyph, OpenFont, NewFont +from robofab.objects.objectsRF import RFont, RGlyph, OpenFont, NewFont, RContour +from robofab.objects.objectsBase import MOVE, LINE, CORNER, CURVE, QCURVE, OFFCURVE + +font = None # RFont +ufopath = '' +svgdir = '' +effectiveAscender = 0 + + +def num(s): + return int(s) if s.find('.') == -1 else float(s) + + +def glyphToSVGPath(g, yMul): + commands = {'move':'M','line':'L','curve':'Y','offcurve':'X','offCurve':'X'} + svg = '' + contours = [] + if len(g.components): + font.newGlyph('__svgsync') + new = font['__svgsync'] + new.width = g.width + new.appendGlyph(g) + new.decompose() + g = new + if len(g): + for c in range(len(g)): + contours.append(g[c]) + for i in range(len(contours)): + c = contours[i] + contour = end = '' + curve = False + points = c.points + if points[0].type == 'offCurve': + points.append(points.pop(0)) + if points[0].type == 'offCurve': + points.append(points.pop(0)) + for x in range(len(points)): + p = points[x] + command = commands[str(p.type)] + if command == 'X': + if curve == True: + command = '' + else: + command = 'C' + curve = True + if command == 'Y': + command = '' + curve = False + if x == 0: + command = 'M' + if p.type == 'curve': + end = ' ' + str(p.x) + ' ' + str(p.y * yMul) + contour += ' ' + command + str(p.x) + ' ' + str(p.y * yMul) + svg += ' ' + contour + end + 'z' + if font.has_key('__svgsync'): + font.removeGlyph('__svgsync') + return svg.strip() + + +def maybeAddMove(contour, x, y, smooth): + if len(contour.segments) == 0: + contour.appendSegment(MOVE, [(x, y)], smooth=smooth) + + + +svgPathDataRegEx = re.compile(r'(?:([A-Z])\s*|)([0-9\.\-\+eE]+)') + + +def drawSVGPath(g, d, tr): + yMul = -1 + xOffs = tr[0] + yOffs = -(font.info.unitsPerEm - tr[1]) + + for pathd in d.split('M'): + pathd = pathd.strip() + # print 'pathd', pathd + if len(pathd) 
== 0: + continue + i = 0 + closePath = False + if pathd[-1] == 'z': + closePath = True + pathd = pathd[0:-1] + + pv = [] + for m in svgPathDataRegEx.finditer('M' + pathd): + if m.group(1) is not None: + pv.append(m.group(1) + m.group(2)) + else: + pv.append(m.group(2)) + + initX = 0 + initY = 0 + + pen = g.getPen() + + while i < len(pv): + pd = pv[i]; i += 1 + cmd = pd[0] + x = num(pd[1:]) + xOffs + y = (num(pv[i]) + yOffs) * yMul; i += 1 + + if cmd == 'M': + # print cmd, x, y, '/', num(pv[i-2][1:]) + initX = x + initY = y + pen.moveTo((x, y)) + continue + + if cmd == 'C': + # Bezier curve: "C x1 y1, x2 y2, x y" + x1 = x + y1 = y + x2 = num(pv[i]) + xOffs; i += 1 + y2 = (num(pv[i]) + yOffs) * yMul; i += 1 + x = num(pv[i]) + xOffs; i += 1 + y = (num(pv[i]) + yOffs) * yMul; i += 1 + pen.curveTo((x1, y1), (x2, y2), (x, y)) + # print cmd, x1, y1, x2, y2, x, y + + elif cmd == 'L': + pen.lineTo((x, y)) + + else: + raise Exception('unexpected SVG path command %r' % cmd) + + if closePath: + pen.closePath() + else: + pen.endPath() + # print 'path ended. closePath:', closePath + + +def glyphToSVG(g): + width = g.width + height = font.info.unitsPerEm + + d = { + 'name': g.name, + 'width': width, + 'height': effectiveAscender - font.info.descender, + 'effectiveAscender': effectiveAscender, + 'leftMargin': g.leftMargin, + 'rightMargin': g.rightMargin, + 'glyphSVGPath': glyphToSVGPath(g, -1), + 'ascender': font.info.ascender, + 'descender': font.info.descender, + 'baselineOffset': height + font.info.descender, + 'unitsPerEm': font.info.unitsPerEm, + } + + # for kv in d.iteritems(): + # if kv[0] == 'glyphSVGPath': + # print ' %s: ...' 
% kv[0] + # else: + # print ' %s: %r' % kv + + svg = ''' + + + + + + + ''' % d + # print svg + return svg.strip() + + +def _findPathNodes(n, paths, defs, uses, isDef=False): + for cn in n.childNodes: + if cn.nodeName == 'path': + if isDef: + defs[cn.getAttribute('id')] = cn + else: + paths.append(cn) + elif cn.nodeName == 'use': + uses[cn.getAttribute('xlink:href').lstrip('#')] = {'useNode': cn, 'targetNode': None} + elif cn.nodeName == 'defs': + _findPathNodes(cn, paths, defs, uses, isDef=True) + elif not isinstance(cn, basestring) and cn.childNodes and len(cn.childNodes) > 0: + _findPathNodes(cn, paths, defs, uses, isDef) + # return translate + + +def findPathNodes(n, isDef=False): + paths = [] + defs = {} + uses = {} + # + # + # + # + # + # ... + # + # 0: + m = re.match(r"translate\s*\(\s*(?P[\-\d\.eE]+)[\s,]*(?P[\-\d\.eE]+)\s*\)", tr) + if m is not None: + x += num(m.group('x')) + y += num(m.group('y')) + else: + raise Exception('Unable to handle transform="%s"' % tr) + # m = re.match(r"matrix\s*\(\s*(?P[\-\d\.eE]+)[\s,]*(?P[\-\d\.eE]+)[\s,]*(?P[\-\d\.eE]+)[\s,]*(?P[\-\d\.eE]+)[\s,]*(?P[\-\d\.eE]+)[\s,]*(?P[\-\d\.eE]+)[\s,]*", tr) + # if m is not None: + # a, b, c = num(m.group('a')), num(m.group('b')), num(m.group('c')) + # d, e, f = num(m.group('d')), num(m.group('e')), num(m.group('f')) + # # matrix -1 0 0 -1 -660.719 31947 + # print 'matrix', a, b, c, d, e, f + # # matrix(-1 0 -0 -1 -2553 31943) + pn = path.parentNode + if pn is not None and pn.nodeName != '#document': + x, y = nodeTranslation(pn, x, y) + return (x, y) + + +def glyphUpdateFromSVG(g, svgCode): + doc = xmlparseString(svgCode) + svg = doc.documentElement + paths = findPathNodes(svg) + if len(paths) == 0: + raise Exception('no found in SVG') + path = paths[0] + if len(paths) != 1: + for p in paths: + id = p.getAttribute('id') + if id is not None and id.find('stroke') == -1: + path = p + break + + tr = nodeTranslation(path) + d = path.getAttribute('d') + g.clearContours() + drawSVGPath(g, d, tr) 
+ + +def stat(path): + try: + return os.stat(path) + except OSError as e: + return None + + +def writeFile(file, s): + with open(file, 'w') as f: + f.write(s) + + +def writeFileAndMkDirsIfNeeded(file, s): + try: + writeFile(file, s) + except IOError as e: + if e.errno == 2: + os.makedirs(os.path.dirname(file)) + writeFile(file, s) + + +def syncGlyphUFOToSVG(glyphname, svgFile, mtime): + print glyphname + ': UFO -> SVG' + g = font.getGlyph(glyphname) + svg = glyphToSVG(g) + writeFileAndMkDirsIfNeeded(svgFile, svg) + os.utime(svgFile, (mtime, mtime)) + print 'write', svgFile + + +def syncGlyphSVGToUFO(glyphname, svgFile): + print glyphname + ': SVG -> UFO' + svg = '' + with open(svgFile, 'r') as f: + svg = f.read() + g = font.getGlyph(glyphname) + glyphUpdateFromSVG(g, svg) + + +def findGlifFile(glyphname): + # glyphname.glif + # glyphname_.glif + # glyphname__.glif + # glyphname___.glif + for underscoreCount in range(0, 5): + fn = os.path.join(ufopath, 'glyphs', glyphname + ('_' * underscoreCount) + '.glif') + st = stat(fn) + if st is not None: + return fn, st + + if glyphname.find('.') != -1: + # glyph_.name.glif + # glyph__.name.glif + # glyph___.name.glif + for underscoreCount in range(0, 5): + nv = glyphname.split('.') + nv[0] = nv[0] + ('_' * underscoreCount) + ns = '.'.join(nv) + fn = os.path.join(ufopath, 'glyphs', ns + '.glif') + st = stat(fn) + if st is not None: + return fn, st + + if glyphname.find('_') != -1: + # glyph_name.glif + # glyph_name_.glif + # glyph_name__.glif + # glyph__name.glif + # glyph__name_.glif + # glyph__name__.glif + # glyph___name.glif + # glyph___name_.glif + # glyph___name__.glif + for x in range(0, 4): + for y in range(0, 5): + ns = glyphname.replace('_', '__' + ('_' * x)) + fn = os.path.join(ufopath, 'glyphs', ns + ('_' * y) + '.glif') + st = stat(fn) + if st is not None: + return fn, st + + return ('', None) + + +def syncGlyph(glyphname): + glyphFile, glyphStat = findGlifFile(glyphname) + + svgFile = os.path.join(svgdir, 
glyphname + '.svg') + svgStat = stat(svgFile) + + if glyphStat is None and svgStat is None: + raise Exception("glyph %r doesn't exist in UFO or SVG directory" % glyphname) + + c = cmp( + 0 if glyphStat is None else glyphStat.st_mtime, + 0 if svgStat is None else svgStat.st_mtime + ) + if c < 0: + syncGlyphSVGToUFO(glyphname, svgFile) + return (glyphFile, svgStat.st_mtime) # glif file in UFO change + it's new mtime + elif c > 0: + syncGlyphUFOToSVG(glyphname, svgFile, glyphStat.st_mtime) + # else: + # print glyphname + ': up to date' + + return (None, 0) # UFO did not change + + +# ———————————————————————————————————————————————————————————————————————— +# main + +argparser = argparse.ArgumentParser(description='Convert UFO glyphs to SVG') + +argparser.add_argument('--svgdir', dest='svgdir', metavar='', type=str, + default='', + help='Write SVG files to . If not specified, SVG files are' + + ' written to: {dirname()/svg//