Mirror of https://github.com/explosion/spaCy.git (synced 2024-12-26 09:56:28 +03:00)

Commit bfabc333be: Merge remote-tracking branch 'origin/develop' into feature/parser-history-model
.buildkite/sdist.yml (new file, 11 lines added)

@@ -0,0 +1,11 @@
steps:
  -
    command: "fab env clean make test sdist"
    label: ":dizzy: :python:"
    artifact_paths: "dist/*.tar.gz"
  - wait
  - trigger: "spacy-sdist-against-models"
    label: ":dizzy: :hammer:"
    build:
      env:
        SPACY_VERSION: "{$SPACY_VERSION}"
.gitignore (vendored, 4 lines changed)

@@ -1,14 +1,12 @@
# spaCy
spacy/data/
corpora/
models/
/models/
keys/

# Website
website/www/
website/_deploy.sh
website/package.json
website/announcement.jade
website/.gitignore

# Cython / C extensions
@@ -1,3 +1,7 @@
+'''Train a multi-label convolutional neural network text classifier,
+using the spacy.pipeline.TextCategorizer component. The model is then added
+to spacy.pipeline, and predictions are available at `doc.cats`.
+'''
 from __future__ import unicode_literals
 import plac
 import random

@@ -12,6 +16,11 @@ from spacy.gold import GoldParse, minibatch
 from spacy.util import compounding
 from spacy.pipeline import TextCategorizer

+# TODO: Remove this once we're not supporting models trained with thinc <6.9.0
+import thinc.neural._classes.layernorm
+thinc.neural._classes.layernorm.set_compat_six_eight(False)
+
+
 def train_textcat(tokenizer, textcat,
                   train_texts, train_cats, dev_texts, dev_cats,

@@ -24,14 +33,15 @@ def train_textcat(tokenizer, textcat,
     train_docs = [tokenizer(text) for text in train_texts]
     train_gold = [GoldParse(doc, cats=cats) for doc, cats in
                   zip(train_docs, train_cats)]
-    train_data = zip(train_docs, train_gold)
+    train_data = list(zip(train_docs, train_gold))
     batch_sizes = compounding(4., 128., 1.001)
     for i in range(n_iter):
         losses = {}
-        train_data = tqdm.tqdm(train_data, leave=False) # Progress bar
-        for batch in minibatch(train_data, size=batch_sizes):
+        # Progress bar and minibatching
+        batches = minibatch(tqdm.tqdm(train_data, leave=False), size=batch_sizes)
+        for batch in batches:
             docs, golds = zip(*batch)
-            textcat.update((docs, None), golds, sgd=optimizer, drop=0.2,
+            textcat.update(docs, golds, sgd=optimizer, drop=0.2,
                            losses=losses)
         with textcat.model.use_params(optimizer.averages):
             scores = evaluate(tokenizer, textcat, dev_texts, dev_cats)

@@ -61,12 +71,13 @@ def evaluate(tokenizer, textcat, texts, cats):
     return {'textcat_p': precis, 'textcat_r': recall, 'textcat_f': fscore}


-def load_data():
+def load_data(limit=0):
     # Partition off part of the train data --- avoid running experiments
     # against test.
     train_data, _ = thinc.extra.datasets.imdb()

     random.shuffle(train_data)
+    train_data = train_data[-limit:]

     texts, labels = zip(*train_data)
     cats = [(['POSITIVE'] if y else []) for y in labels]

@@ -86,7 +97,7 @@ def main(model_loc=None):
     textcat = TextCategorizer(tokenizer.vocab, labels=['POSITIVE'])

     print("Load IMDB data")
-    (train_texts, train_cats), (dev_texts, dev_cats) = load_data()
+    (train_texts, train_cats), (dev_texts, dev_cats) = load_data(limit=1000)

     print("Itn.\tLoss\tP\tR\tF")
     progress = '{i:d} {loss:.3f} {textcat_p:.3f} {textcat_r:.3f} {textcat_f:.3f}'
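For readers skimming the diff above, the `compounding`/`minibatch` pair controls how batch sizes grow during training. The sketch below re-implements both helpers locally to show the schedule; the helper bodies are simplified assumptions for illustration, not spaCy's exact implementations.

# Minimal sketch of the compounding batch-size schedule used above.
# Both helpers are re-implemented here for illustration only; spaCy's own
# versions live in spacy.util / spacy.gold and may differ in detail.

def compounding(start, stop, compound):
    """Yield an infinite series that grows by `compound` and is capped at `stop`."""
    value = start
    while True:
        yield min(value, stop)
        value *= compound

def minibatch(items, size):
    """Group items into batches whose sizes are drawn from the `size` generator."""
    items = iter(items)
    while True:
        batch_size = int(next(size))
        batch = [item for item, _ in zip(items, range(batch_size))]
        if not batch:
            return
        yield batch

batch_sizes = compounding(4., 128., 1.001)
batches = list(minibatch(range(10000), size=batch_sizes))
print(len(batches), len(batches[0]), len(batches[-1]))  # batches start small and grow slowly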
spacy/_ml.py (23 lines changed)

@@ -631,6 +631,7 @@ def foreach(layer, drop_factor=1.0):

 def build_text_classifier(nr_class, width=64, **cfg):
     nr_vector = cfg.get('nr_vector', 5000)
+    pretrained_dims = cfg.get('pretrained_dims', 0)
     with Model.define_operators({'>>': chain, '+': add, '|': concatenate,
                                  '**': clone}):
         if cfg.get('low_data'):

@@ -638,7 +639,7 @@ def build_text_classifier(nr_class, width=64, **cfg):
                 SpacyVectors
                 >> flatten_add_lengths
                 >> with_getitem(0,
-                    Affine(width, 300)
+                    Affine(width, pretrained_dims)
                 )
                 >> ParametricAttention(width)
                 >> Pooling(sum_pool)

@@ -665,18 +666,24 @@ def build_text_classifier(nr_class, width=64, **cfg):
             )
         )

+        if pretrained_dims:
             static_vectors = (
                 SpacyVectors
-                >> with_flatten(Affine(width, 300))
+                >> with_flatten(Affine(width, pretrained_dims))
             )
-
-        cnn_model = (
             # TODO Make concatenate support lists
-            concatenate_lists(trained_vectors, static_vectors)
+            vectors = concatenate_lists(trained_vectors, static_vectors)
+            vectors_width = width*2
+        else:
+            vectors = trained_vectors
+            vectors_width = width
+            static_vectors = None
+        cnn_model = (
+            vectors
             >> with_flatten(
-                LN(Maxout(width, width*2))
+                LN(Maxout(width, vectors_width))
                 >> Residual(
-                    (ExtractWindow(nW=1) >> zero_init(Maxout(width, width*3)))
+                    (ExtractWindow(nW=1) >> LN(Maxout(width, width*3)))
                 ) ** 2, pad=2
             )
             >> flatten_add_lengths

@@ -696,7 +703,7 @@ def build_text_classifier(nr_class, width=64, **cfg):
             >> zero_init(Affine(nr_class, nr_class*2, drop_factor=0.0))
             >> logistic
         )

     model.nO = nr_class
     model.lsuv = False
     return model
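The `pretrained_dims` branch above decides whether static (pre-trained) vectors are concatenated with the trained embeddings, which is why the downstream `Maxout` width switches between `width` and `width*2`. A rough shape-only illustration follows; it uses numpy rather than thinc, and all sizes are assumptions.

# Shape-only sketch of the vectors_width bookkeeping in build_text_classifier.
# numpy stands in for thinc here; the projection below is a toy, not SpacyVectors.
import numpy as np

width = 64             # trained embedding width
pretrained_dims = 300  # e.g. GloVe dimensionality; 0 disables the static branch
n_tokens = 10

trained = np.random.randn(n_tokens, width)

if pretrained_dims:
    # Static vectors get projected down to `width`, then concatenated.
    static = np.random.randn(n_tokens, pretrained_dims) @ np.random.randn(pretrained_dims, width)
    vectors = np.concatenate([trained, static], axis=-1)
    vectors_width = width * 2
else:
    vectors = trained
    vectors_width = width

assert vectors.shape == (n_tokens, vectors_width)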
@@ -3,15 +3,15 @@
 # https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py

 __title__ = 'spacy-nightly'
-__version__ = '2.0.0a15'
+__version__ = '2.0.0a16'
 __summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
 __uri__ = 'https://spacy.io'
 __author__ = 'Explosion AI'
 __email__ = 'contact@explosion.ai'
 __license__ = 'MIT'
-__release__ = False
+__release__ = True

-__docs_models__ = 'https://spacy.io/docs/usage/models'
+__docs_models__ = 'https://alpha.spacy.io/usage/models'
 __download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
 __compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
 __shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
@@ -33,16 +33,23 @@ numpy.random.seed(0)
     data_path=("Location of JSON-formatted evaluation data", "positional", None, str),
     gold_preproc=("Use gold preprocessing", "flag", "G", bool),
     gpu_id=("Use GPU", "option", "g", int),
+    displacy_path=("Directory to output rendered parses as HTML", "option", "dp", str),
+    displacy_limit=("Limit of parses to render as HTML", "option", "dl", int)
 )
-def evaluate(cmd, model, data_path, gpu_id=-1, gold_preproc=False):
+def evaluate(cmd, model, data_path, gpu_id=-1, gold_preproc=False,
+             displacy_path=None, displacy_limit=25):
     """
-    Train a model. Expects data in spaCy's JSON format.
+    Evaluate a model. To render a sample of parses in a HTML file, set an output
+    directory as the displacy_path argument.
     """
     util.use_gpu(gpu_id)
     util.set_env_log(False)
     data_path = util.ensure_path(data_path)
+    displacy_path = util.ensure_path(displacy_path)
     if not data_path.exists():
         prints(data_path, title="Evaluation data not found", exits=1)
+    if displacy_path and not displacy_path.exists():
+        prints(displacy_path, title="Visualization output directory not found", exits=1)
     corpus = GoldCorpus(data_path, data_path)
     nlp = util.load_model(model)
     dev_docs = list(corpus.dev_docs(nlp, gold_preproc=gold_preproc))

@@ -50,17 +57,26 @@ def evaluate(cmd, model, data_path, gpu_id=-1, gold_preproc=False,
     scorer = nlp.evaluate(dev_docs, verbose=False)
     end = timer()
     nwords = sum(len(doc_gold[0]) for doc_gold in dev_docs)
-    print('Time', end-begin, 'words', nwords, 'w.p.s', nwords/(end-begin))
-    print_results(scorer)
+    print_results(scorer, time=end - begin, words=nwords,
+                  wps=nwords / (end - begin))
+    if displacy_path:
+        docs, golds = zip(*dev_docs)
+        render_deps = 'parser' in nlp.meta.get('pipeline', [])
+        render_ents = 'ner' in nlp.meta.get('pipeline', [])
+        render_parses(docs, displacy_path, model_name=model, limit=displacy_limit,
+                      deps=render_deps, ents=render_ents)
+        prints(displacy_path, title="Generated %s parses as HTML" % displacy_limit)


-def _render_parses(i, to_render):
-    to_render[0].user_data['title'] = "Batch %d" % i
-    with Path('/tmp/entities.html').open('w') as file_:
-        html = displacy.render(to_render[:5], style='ent', page=True)
+def render_parses(docs, output_path, model_name='', limit=250, deps=True, ents=True):
+    docs[0].user_data['title'] = model_name
+    if ents:
+        with (output_path / 'entities.html').open('w') as file_:
+            html = displacy.render(docs[:limit], style='ent', page=True)
             file_.write(html)
-    with Path('/tmp/parses.html').open('w') as file_:
-        html = displacy.render(to_render[:5], style='dep', page=True)
+    if deps:
+        with (output_path / 'parses.html').open('w') as file_:
+            html = displacy.render(docs[:limit], style='dep', page=True, options={'compact': True})
             file_.write(html)

@@ -88,8 +104,11 @@ def print_progress(itn, losses, dev_scores, wps=0.0):
     print(tpl.format(itn, **scores))


-def print_results(scorer):
+def print_results(scorer, time, words, wps):
     results = {
+        'Time': '%.2f s' % time,
+        'Words': words,
+        'Words/s': '%.0f' % wps,
         'TOK': '%.2f' % scorer.token_acc,
         'POS': '%.2f' % scorer.tags_acc,
         'UAS': '%.2f' % scorer.uas,
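The new `print_results()` signature threads timing information through from `evaluate()`. A small self-contained sketch of that bookkeeping follows; the doc/gold pairs are stand-ins, not real spaCy objects.

# Sketch of the words-per-second bookkeeping added to the evaluate command.
# `dev_docs` is faked with a single tokenised sentence; real runs use GoldCorpus.
from timeit import default_timer as timer

begin = timer()
dev_docs = [(['This', 'is', 'a', 'sentence', '.'], None)]  # (doc, gold) stand-ins
nwords = sum(len(doc_gold[0]) for doc_gold in dev_docs)
end = timer()

results = {
    'Time': '%.2f s' % (end - begin),
    'Words': nwords,
    'Words/s': '%.0f' % (nwords / max(end - begin, 1e-9)),
}
print(results)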
spacy/tests/regression/test_issue1380.py (new file, 14 lines added)

@@ -0,0 +1,14 @@
from __future__ import unicode_literals
import pytest

from ...language import Language

def test_issue1380_empty_string():
    nlp = Language()
    doc = nlp('')
    assert len(doc) == 0

@pytest.mark.models('en')
def test_issue1380_en(EN):
    doc = EN('')
    assert len(doc) == 0
@@ -8,4 +8,5 @@ include _includes/_mixins
         | does not exist!

     h2.c-landing__title.u-heading-3.u-padding-small
-        a(href="javascript:history.go(-1)") Click here to go back.
+        +button(false, true, "secondary-light")(href="javascript:history.go(-1)")
+            | Click here to go back
@@ -3,24 +3,22 @@
    "landing": true,
    "logos": [
        {
            "quora": [ "https://www.quora.com", 150 ],
            "chartbeat": [ "https://chartbeat.com", 200 ],
            "duedil": [ "https://www.duedil.com", 150 ],
            "stitchfix": [ "https://www.stitchfix.com", 190 ]
            "airbnb": [ "https://www.airbnb.com", 150, 45],
            "quora": [ "https://www.quora.com", 120, 34 ],
            "retriever": [ "https://www.retriever.no", 150, 33 ],
            "stitchfix": [ "https://www.stitchfix.com", 150, 18 ]
        },
        {
            "wayblazer": [ "http://wayblazer.com", 200 ],
            "indico": [ "https://indico.io", 150 ],
            "chattermill": [ "https://chattermill.io", 175 ],
            "turi": [ "https://turi.com", 150 ],
            "kip": [ "http://kipthis.com", 70 ]
        },
            "chartbeat": [ "https://chartbeat.com", 180, 25 ],
            "allenai": [ "https://allenai.org", 220, 37 ]
        }
    ],
    "features": [
        {
            "socrata": [ "https://www.socrata.com", 150 ],
            "cytora": [ "http://www.cytora.com", 125 ],
            "signaln": [ "http://signaln.com", 150 ],
            "wonderflow": [ "http://www.wonderflow.co", 200 ],
            "synapsify": [ "http://www.gosynapsify.com", 150 ]
            "thoughtworks": ["https://www.thoughtworks.com/radar/tools", 150, 28],
            "wapo": ["https://www.washingtonpost.com/news/wonk/wp/2016/05/18/googles-new-artificial-intelligence-cant-understand-these-sentences-can-you/", 100, 77],
            "venturebeat": ["https://venturebeat.com/2017/01/27/4-ai-startups-that-analyze-customer-reviews/", 150, 19],
            "microsoft": ["https://www.microsoft.com/developerblog/2016/09/13/training-a-classifier-for-relation-extraction-from-medical-literature/", 130, 28]
        }
    ]
},

@@ -34,7 +32,24 @@
    "landing": true
},

"announcement" : {
    "title": "Important Announcement"
"styleguide": {
    "title": "Styleguide",
    "sidebar": {
        "Styleguide": { "": "styleguide" },
        "Resources": {
            "Website Source": "https://github.com/explosion/spacy/tree/master/website",
            "Contributing Guide": "https://github.com/explosion/spaCy/blob/master/CONTRIBUTING.md"
        }
    },
    "menu": {
        "Introduction": "intro",
        "Logo": "logo",
        "Colors": "colors",
        "Typography": "typography",
        "Elements": "elements",
        "Components": "components",
        "Embeds": "embeds",
        "Markup Reference": "markup"
    }
}
}
@@ -11,12 +11,9 @@
    "COMPANY": "Explosion AI",
    "COMPANY_URL": "https://explosion.ai",
    "DEMOS_URL": "https://demos.explosion.ai",
    "MODELS_REPO": "explosion/spacy-models",

    "SPACY_VERSION": "1.8",
    "LATEST_NEWS": {
        "url": "https://github.com/explosion/spaCy/releases/tag/v2.0.0-alpha",
        "title": "Test spaCy v2.0.0 alpha!"
    },
    "SPACY_VERSION": "2.0",

    "SOCIAL": {
        "twitter": "spacy_io",

@@ -27,25 +24,23 @@
    },

    "NAVIGATION": {
        "Home": "/",
        "Usage": "/docs/usage",
        "Reference": "/docs/api",
        "Demos": "/docs/usage/showcase",
        "Blog": "https://explosion.ai/blog"
        "Usage": "/usage",
        "Models": "/models",
        "API": "/api"
    },

    "FOOTER": {
        "spaCy": {
            "Usage": "/docs/usage",
            "API Reference": "/docs/api",
            "Tutorials": "/docs/usage/tutorials",
            "Showcase": "/docs/usage/showcase"
            "Usage": "/usage",
            "Models": "/models",
            "API Reference": "/api",
            "Resources": "/usage/resources"
        },
        "Support": {
            "Issue Tracker": "https://github.com/explosion/spaCy/issues",
            "StackOverflow": "http://stackoverflow.com/questions/tagged/spacy",
            "Reddit usergroup": "https://www.reddit.com/r/spacynlp/",
            "Gitter chat": "https://gitter.im/explosion/spaCy"
            "Reddit Usergroup": "https://www.reddit.com/r/spacynlp/",
            "Gitter Chat": "https://gitter.im/explosion/spaCy"
        },
        "Connect": {
            "Twitter": "https://twitter.com/spacy_io",

@@ -74,21 +69,11 @@
        {"id": "venv", "title": "virtualenv", "help": "Use a virtual environment and install spaCy into a user directory" },
        {"id": "gpu", "title": "GPU", "help": "Run spaCy on GPU to make it faster. Requires an NVDIA graphics card with CUDA 2+. See section below for more info."}]
    },
    { "id": "model", "title": "Models", "multiple": true, "options": [
        { "id": "en", "title": "English", "meta": "50MB" },
        { "id": "de", "title": "German", "meta": "645MB" },
        { "id": "fr", "title": "French", "meta": "1.33GB" },
        { "id": "es", "title": "Spanish", "meta": "377MB"}]
    }
    { "id": "model", "title": "Models", "multiple": true }
    ],

    "QUICKSTART_MODELS": [
    { "id": "lang", "title": "Language", "options": [
        { "id": "en", "title": "English", "checked": true },
        { "id": "de", "title": "German" },
        { "id": "fr", "title": "French" },
        { "id": "es", "title": "Spanish" }]
    },
    { "id": "lang", "title": "Language"},
    { "id": "load", "title": "Loading style", "options": [
        { "id": "spacy", "title": "Use spacy.load()", "checked": true, "help": "Use spaCy's built-in loader to load the model by name." },
        { "id": "module", "title": "Import as module", "help": "Import the model explicitly as a Python module." }]

@@ -98,50 +83,15 @@
    }
    ],

    "MODELS": {
        "en": [
            { "id": "en_core_web_sm", "lang": "English", "feats": [1, 1, 1, 1], "size": "50 MB", "license": "CC BY-SA", "def": true },
            { "id": "en_core_web_md", "lang": "English", "feats": [1, 1, 1, 1], "size": "1 GB", "license": "CC BY-SA" },
            { "id": "en_depent_web_md", "lang": "English", "feats": [1, 1, 1, 0], "size": "328 MB", "license": "CC BY-SA" },
            { "id": "en_vectors_glove_md", "lang": "English", "feats": [1, 0, 0, 1], "size": "727 MB", "license": "CC BY-SA" }
        ],
        "de": [
            { "id": "de_core_news_md", "lang": "German", "feats": [1, 1, 1, 1], "size": "645 MB", "license": "CC BY-SA" }
        ],
        "fr": [
            { "id": "fr_depvec_web_lg", "lang": "French", "feats": [1, 1, 0, 1], "size": "1.33 GB", "license": "CC BY-NC" }
        ],
        "es": [
            { "id": "es_core_web_md", "lang": "Spanish", "feats": [1, 1, 1, 1], "size": "377 MB", "license": "CC BY-SA"}
        ]
    },

    "EXAMPLE_SENTENCES": {
        "en": "This is a sentence.",
        "de": "Dies ist ein Satz.",
        "fr": "C'est une phrase.",
        "es": "Esto es una frase."
    },

    "ALPHA": true,
    "V_CSS": "1.6",
    "V_JS": "1.2",
    "V_CSS": "2.0",
    "V_JS": "2.0",
    "DEFAULT_SYNTAX": "python",
    "ANALYTICS": "UA-58931649-1",
    "MAILCHIMP": {
        "user": "spacy.us12",
        "id": "83b0498b1e7fa3c91ce68c3f1",
        "list": "89ad33e698"
    },
    "BADGES": {
        "pipy": {
            "badge": "https://img.shields.io/pypi/v/spacy.svg?style=flat-square",
            "link": "https://pypi.python.org/pypi/spacy"
        },
        "conda": {
            "badge": "https://anaconda.org/conda-forge/spacy/badges/version.svg",
            "link": "https://anaconda.org/conda-forge/spacy"
        }
    }
}
}
@@ -1,8 +1,6 @@
//- 💫 INCLUDES > FOOTER

include _mixins

footer.o-footer.u-text.u-border-dotted
footer.o-footer.u-text
    +grid.o-content
        each group, label in FOOTER
            +grid-col("quarter")

@@ -13,18 +11,18 @@ footer.o-footer.u-text.u-border-dotted
                li
                    +a(url)=item

    if SECTION != "docs"
    if SECTION == "index"
        +grid-col("quarter")
            include _newsletter

    if SECTION == "docs"
    if SECTION != "index"
        .o-content.o-block.u-border-dotted
            include _newsletter

    .o-inline-list.u-text-center.u-text-tiny.u-color-subtle
        span © 2016-#{new Date().getFullYear()} #[+a(COMPANY_URL, true)=COMPANY]

        +a(COMPANY_URL, true)
            +svg("graphics", "explosion", 45).o-icon.u-color-theme.u-grayscale
        +a(COMPANY_URL, true)(aria-label="Explosion AI")
            +icon("explosion", 45).o-icon.u-color-theme.u-grayscale

        +a(COMPANY_URL + "/legal", true) Legal / Imprint
@@ -1,35 +1,71 @@
//- 💫 INCLUDES > FUNCTIONS

//- More descriptive variables for current.path and current.source
//- Descriptive variables, available in the global scope

- CURRENT = current.source
- SECTION = current.path[0]
- SUBSECTION = current.path[1]
- LANGUAGES = public.models._data.LANGUAGES
- MODELS = public.models._data.MODELS
- CURRENT_MODELS = MODELS[current.source] || []

- MODEL_COUNT = Object.keys(MODELS).map(m => Object.keys(MODELS[m]).length).reduce((a, b) => a + b)
- MODEL_LANG_COUNT = Object.keys(MODELS).length
- LANG_COUNT = Object.keys(LANGUAGES).length

- MODEL_META = public.models._data.MODEL_META
- MODEL_LICENSES = public.models._data.MODEL_LICENSES
- MODEL_ACCURACY = public.models._data.MODEL_ACCURACY
- EXAMPLE_SENTENCES = public.models._data.EXAMPLE_SENTENCES

- IS_PAGE = (SECTION != "index") && !landing
- IS_MODELS = (SECTION == "models" && LANGUAGES[current.source])
- HAS_MODELS = IS_MODELS && CURRENT_MODELS.length


//- Add prefixes to items of an array (for modifier CSS classes)
    array - [array] list of class names or options, e.g. ["foot"]
    prefix - [string] prefix to add to each class, e.g. "c-table__row"
    RETURNS - [array] list of modified class names

- function prefixArgs(array, prefix) {
-     return array.map(function(arg) {
-         return prefix + '--' + arg;
-     }).join(' ');
-     return array.map(arg => prefix + '--' + arg).join(' ');
- }


//- Convert API paths (semi-temporary fix for renamed sections)
    path - [string] link path supplied to +api mixin
    RETURNS - [string] new link path to correct location

- function convertAPIPath(path) {
-     if (path.startsWith('spacy#') || path.startsWith('displacy#') || path.startsWith('util#')) {
-         var comps = path.split('#');
-         return "top-level#" + comps[0] + '.' + comps[1];
-     }
-     else if (path.startsWith('cli#')) {
-         return "top-level#" + path.split('#')[1];
-     }
-     return path;
- }


//- Get model components from ID. Components can then be looked up in LANGUAGES
    and MODEL_META respectively, to get their human-readable form.
    id - [string] model ID, e.g. "en_core_web_sm"
    RETURNS - [object] object keyed by components lang, type, genre and size

- function getModelComponents(id) {
-     var comps = id.split('_');
-     return {'lang': comps[0], 'type': comps[1], 'genre': comps[2], 'size': comps[3]}
- }


//- Generate GitHub links
    repo - [string] name of repo owned by explosion
    filepath - [string] logical path to file relative to repository root
    branch - [string] optional branch, defaults to "master"
    RETURNS - [string] the correct link to the file on GitHub

- function gh(repo, filepath, branch) {
-     var branch = ALPHA ? 'develop' : branch
-     return 'https://github.com/' + SOCIAL.github + '/' + repo + (filepath ? '/blob/' + (branch || 'master') + '/' + filepath : '' );
- }


//- Get social images

- function getSocialImg() {
-     var base = SITE_URL + '/assets/img/social/preview_'
-     var image = ALPHA ? 'alpha' : 'default'
-     if (preview) image = preview
-     else if (SECTION == 'docs' && !ALPHA) image = 'docs'
-     return base + image + '.jpg'
-     return 'https://github.com/' + SOCIAL.github + '/' + (repo || '') + (filepath ? '/blob/' + (branch || 'master') + '/' + filepath : '' );
- }
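The `getModelComponents()` helper above encodes the model-naming convention `<lang>_<type>_<genre>_<size>`. For reference, an equivalent in Python; this is a hypothetical helper mirroring the Jade function, not part of spaCy's API.

# Python mirror of the Jade getModelComponents() helper (illustrative only).
def get_model_components(model_id):
    lang, type_, genre, size = model_id.split('_')
    return {'lang': lang, 'type': type_, 'genre': genre, 'size': size}

print(get_model_components('en_core_web_sm'))
# {'lang': 'en', 'type': 'core', 'genre': 'web', 'size': 'sm'}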
@@ -1,5 +1,13 @@
//- 💫 MIXINS > BASE

//- Section
    id - [string] anchor assigned to section (used for breadcrumb navigation)

mixin section(id)
    section.o-section(id="section-" + id data-section=id)
        block


//- Aside wrapper
    label - [string] aside label

@@ -11,34 +19,26 @@ mixin aside-wrapper(label)

        block

//- Date
    input - [string] date in the format YYYY-MM-DD

mixin date(input)
    - var date = new Date(input)
    - var months = [ 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December' ]

    time(datetime=JSON.parse(JSON.stringify(date)))&attributes(attributes)=months[date.getMonth()] + ' ' + date.getDate() + ', ' + date.getFullYear()


//- SVG from map
    file - [string] SVG file name in /assets/img/
//- SVG from map (uses embedded SVG sprite)
    name - [string] SVG symbol id
    width - [integer] width in px
    height - [integer] height in px (default: same as width)

mixin svg(file, name, width, height)
mixin svg(name, width, height)
    svg(aria-hidden="true" viewBox="0 0 #{width} #{height || width}" width=width height=(height || width))&attributes(attributes)
        use(xlink:href="/assets/img/#{file}.svg##{name}")
        use(xlink:href="#svg_#{name}")


//- Icon
    name - [string] icon name, should be SVG symbol ID
    size - [integer] icon width and height (default: 20)
    name - [string] icon name (will be used as symbol id: #svg_{name})
    width - [integer] icon width (default: 20)
    height - [integer] icon height (defaults to width)

mixin icon(name, size)
    - var size = size || 20
    +svg("icons", name, size).o-icon(style="min-width: #{size}px")&attributes(attributes)
mixin icon(name, width, height)
    - var width = width || 20
    - var height = height || width
    +svg(name, width, height).o-icon(style="min-width: #{width}px")&attributes(attributes)


//- Pro/Con/Neutral icon

@@ -46,8 +46,8 @@ mixin icon(name, size)
    size - [integer] icon size (optional)

mixin procon(icon, size)
    - colors = { pro: "green", con: "red", neutral: "yellow" }
    +icon(icon, size)(class="u-color-#{colors[icon] || 'subtle'}" aria-label=icon)&attributes(attributes)
    - colors = { pro: "green", con: "red", neutral: "subtle" }
    +icon("circle", size || 16)(class="u-color-#{colors[icon] || 'subtle'}" aria-label=icon)&attributes(attributes)


//- Headlines Helper Mixin

@@ -80,8 +80,7 @@ mixin headline(level)

mixin permalink(id)
    if id
        a.u-permalink(id=id href="##{id}")
            +icon("anchor").u-permalink__icon
        a.u-permalink(href="##{id}")
            block

    else

@@ -109,7 +108,7 @@ mixin quickstart(groups, headline, description, hide_results)
    .c-quickstart__fields
        for option in group.options
            input.c-quickstart__input(class="c-quickstart__input--" + (group.input_style ? group.input_style : group.multiple ? "check" : "radio") type=group.multiple ? "checkbox" : "radio" name=group.id id="qs-#{option.id}" value=option.id checked=option.checked)
            label.c-quickstart__label(for="qs-#{option.id}")!=option.title
            label.c-quickstart__label.u-text-tiny(for="qs-#{option.id}")!=option.title
            if option.meta
                | #[span.c-quickstart__label__meta (#{option.meta})]
            if option.help

@@ -122,12 +121,10 @@ mixin quickstart(groups, headline, description, hide_results)
        code.c-code-block__content.c-quickstart__code(data-qs-results="")
            block

    .c-quickstart__info.u-text-tiny.o-block.u-text-right
        | Like this widget? Check out #[+a("https://github.com/ines/quickstart").u-link quickstart.js]!


//- Quickstart code item
    data [object] - Rendering conditions (keyed by option group ID, value: option)
    data - [object] Rendering conditions (keyed by option group ID, value: option)
    style - [string] modifier ID for line style

mixin qs(data, style)
    - args = {}

@@ -148,6 +145,13 @@ mixin terminal(label)
    +code.x-terminal__code
        block

//- Chart.js
    id - [string] chart ID, will be assigned as #chart_{id}

mixin chart(id)
    figure.o-block&attributes(attributes)
        canvas(id="chart_#{id}" width="800" height="400" style="max-width: 100%")


//- Gitter chat button and widget
    button - [string] text shown on button

@@ -156,26 +160,24 @@ mixin terminal(label)
mixin gitter(button, label)
    aside.js-gitter.c-chat.is-collapsed(data-title=(label || button))

    button.js-gitter-button.c-chat__button.u-text-small
        +icon("chat").o-icon--inline
    button.js-gitter-button.c-chat__button.u-text-tag
        +icon("chat", 16).o-icon--inline
        !=button


//- Badge
    name - [string] "pipy" or "conda"
    image - [string] path to badge image
    url - [string] badge link

mixin badge(name)
    - site = BADGES[name]

    if site
        +a(site.link).u-padding-small
            img(src=site.badge alt="{name} version" height="20")
mixin badge(image, url)
    +a(url).u-padding-small.u-hide-link&attributes(attributes)
        img.o-badge(src=image alt=url height="20")


//- Logo
//- spaCy logo

mixin logo()
    +svg("graphics", "spacy", 675, 215).o-logo&attributes(attributes)
    +svg("spacy", 675, 215).o-logo&attributes(attributes)


//- Landing

@@ -186,18 +188,56 @@ mixin landing-header()
    .c-landing__content
        block

mixin landing-banner(headline, label)
    .c-landing__banner.u-padding.o-block.u-color-light
        +grid.c-landing__banner__content.o-no-block
            +grid-col("third")
                h3.u-heading.u-heading-1
                    if label
                        div
                            span.u-text-label.u-text-label--light=label
                    !=headline

mixin landing-badge(url, graphic, alt, size)
    +a(url)(aria-label=alt title=alt).c-landing__badge
        +svg("graphics", graphic, size || 225)
            +grid-col("two-thirds").c-landing__banner__text
                block


mixin landing-logos(title, logos)
    .o-content.u-text-center&attributes(attributes)
        h3.u-heading.u-text-label.u-color-dark=title

        each row, i in logos
            - var is_last = i == logos.length - 1
            +grid("center").o-inline-list.o-no-block(class=is_last ? "o-no-block" : null)
                each details, name in row
                    +a(details[0]).u-padding-medium
                        +icon(name, details[1], details[2])

                if is_last
                    block


//- Under construction (temporary)
    Marks sections that still need to be completed for the v2.0 release.

mixin under-construction()
    +infobox("🚧 Under construction")
    +infobox("Under construction", "🚧")
        | This section is still being written and will be updated for the v2.0
        | release. Is there anything that you think should definitely mentioned or
        | explained here? Any examples you'd like to see? #[strong Let us know]
        | on the #[+a(gh("spacy") + "/issues/1105") v2.0 alpha thread] on GitHub!


//- Alpha infobox (temporary)
    Added in the templates to notify user that they're visiting the alpha site.

mixin alpha-info()
    +infobox("You are viewing the spaCy v2.0.0 alpha docs", "⚠️")
        strong This page is part of the alpha documentation for spaCy v2.0.
        | It does not reflect the state of the latest stable release.
        | Because v2.0 is still under development, the implementation
        | may differ from the intended state described here. See the
        | #[+a(gh("spaCy") + "/releases/tag/v2.0.0-alpha") release notes]
        | for details on how to install and test the new version. To
        | read the official docs for spaCy v1.x,
        | #[+a("https://spacy.io/docs") go here].
@@ -8,11 +8,15 @@ include _mixins-base
    level - [integer] headline level, corresponds to h1, h2, h3 etc.
    id - [string] unique identifier, creates permalink (optional)

mixin h(level, id)
    +headline(level).u-heading&attributes(attributes)
mixin h(level, id, source)
    +headline(level).u-heading(id=id)&attributes(attributes)
        +permalink(id)
            block

        if source
            +button(gh("spacy", source), false, "secondary", "small").u-nowrap.u-float-right
                span Source #[+icon("code", 14).o-icon--inline]


//- External links
    url - [string] link href

@@ -38,21 +42,23 @@ mixin src(url)


//- API link (with added tag and automatically generated path)
    path - [string] path to API docs page relative to /docs/api/
    path - [string] path to API docs page relative to /api/

mixin api(path)
    +a("/docs/api/" + path, true)(target="_self").u-no-border.u-inline-block.u-nowrap
    - path = convertAPIPath(path)
    +a("/api/" + path, true)(target="_self").u-no-border.u-inline-block.u-nowrap
        block

        | #[+icon("book", 18).o-icon--inline.u-color-theme]
        | #[+icon("book", 16).o-icon--inline.u-color-theme]


//- Help icon with tooltip
    tooltip - [string] Tooltip text
    icon_size - [integer] Optional size of help icon in px.

mixin help(tooltip)
mixin help(tooltip, icon_size)
    span(data-tooltip=tooltip)&attributes(attributes)
        +icon("help", 16).i-icon--inline
        +icon("help", icon_size || 16).o-icon--inline


//- Aside for text

@@ -68,24 +74,43 @@ mixin aside(label)
    label - [string] aside title (optional or false for no label)
    language - [string] language for syntax highlighting (default: "python")
        supports basic relevant languages available for PrismJS
    prompt - [string] prompt displayed before first line, e.g. "$"

mixin aside-code(label, language)
mixin aside-code(label, language, prompt)
    +aside-wrapper(label)
        +code(false, language).o-no-block
        +code(false, language, prompt).o-no-block
            block


//- Infobox
    label - [string] infobox title (optional or false for no title)
    emoji - [string] optional emoji displayed before the title, necessary as
        argument to be able to wrap it for spacing

mixin infobox(label)
mixin infobox(label, emoji)
    aside.o-box.o-block.u-text-small
        if label
            h3.u-text-label.u-color-theme=label
            h3.u-heading.u-text-label.u-color-theme
                if emoji
                    span.o-emoji=emoji
                | #{label}

        block


//- Logos displayed in the top corner of some infoboxes
    logos - [array] List of icon ID, width, height and link.

mixin infobox-logos(...logos)
    .o-box__logos.u-text-right.u-float-right
        for logo in logos
            if logo[3]
                | #[+a(logo[3]).u-inline-block.u-hide-link.u-padding-small #[+icon(logo[0], logo[1], logo[2]).u-color-dark]]
            else
                | #[+icon(logo[0], logo[1], logo[2]).u-color-dark]



//- Link button
    url - [string] link href
    trusted - [boolean] if not set / false, rel="noopener nofollow" is added

@@ -94,7 +119,7 @@ mixin infobox(label)
    see assets/css/_components/_buttons.sass

mixin button(url, trusted, ...style)
    - external = url.includes("http")
    - external = url && url.includes("http")
    a.c-button.u-text-label(href=url class=prefixArgs(style, "c-button") role="button" target=external ? "_blank" : null rel=external && !trusted ? "noopener nofollow" : null)&attributes(attributes)
        block


@@ -103,31 +128,33 @@ mixin button(url, trusted, ...style)
    label - [string] aside title (optional or false for no label)
    language - [string] language for syntax highlighting (default: "python")
        supports basic relevant languages available for PrismJS
    prompt - [string] prompt or icon to display next to code block, (mostly used for old/new)
    prompt - [string] prompt displayed before first line, e.g. "$"
    height - [integer] optional height to clip code block to
    icon - [string] icon displayed next to code block (e.g. "accept" for new code)
    wrap - [boolean] wrap text and disable horizontal scrolling

mixin code(label, language, prompt, height)
mixin code(label, language, prompt, height, icon, wrap)
    pre.c-code-block.o-block(class="lang-#{(language || DEFAULT_SYNTAX)}" class=icon ? "c-code-block--has-icon" : null style=height ? "height: #{height}px" : null)&attributes(attributes)
        if label
            h4.u-text-label.u-text-label--dark=label
        - var icon = (prompt == 'accept' || prompt == 'reject')
        - var icon = icon || (prompt == 'accept' || prompt == 'reject')
        if icon
            - var classes = {'accept': 'u-color-green', 'reject': 'u-color-red'}
            .c-code-block__icon(class=classes[icon] || null class=classes[icon] ? "c-code-block__icon--border" : null)
                +icon(icon, 18)

        code.c-code-block__content(data-prompt=icon ? null : prompt)
        code.c-code-block__content(class=wrap ? "u-wrap" : null data-prompt=icon ? null : prompt)
            block


//- Code blocks to display old/new versions

mixin code-old()
    +code(false, false, "reject").o-block-small
    +code(false, false, false, false, "reject").o-block-small
        block

mixin code-new()
    +code(false, false, "accept").o-block-small
    +code(false, false, false, false, "accept").o-block-small
        block


@@ -138,12 +165,33 @@ mixin code-new()

mixin codepen(slug, height, default_tab)
    figure.o-block(style="min-height: #{height}px")&attributes(attributes)
        .codepen(data-height=height data-theme-id="26467" data-slug-hash=slug data-default-tab=(default_tab || "result") data-embed-version="2" data-user=SOCIAL.codepen)
        .codepen(data-height=height data-theme-id="31335" data-slug-hash=slug data-default-tab=(default_tab || "result") data-embed-version="2" data-user=SOCIAL.codepen)
            +a("https://codepen.io/" + SOCIAL.codepen + "/" + slug) View on CodePen

        script(async src="https://assets.codepen.io/assets/embed/ei.js")


//- GitHub embed
    repo - [string] repository owned by explosion organization
    file - [string] logical path to file, relative to repository root
    alt_file - [string] alternative file path used in footer and link button
    height - [integer] height of code preview in px

mixin github(repo, file, alt_file, height)
    - var branch = ALPHA ? "develop" : "master"
    - var height = height || 250

    figure.o-block
        pre.c-code-block.o-block-small(class="lang-#{(language || DEFAULT_SYNTAX)}" style="height: #{height}px; min-height: #{height}px")
            code.c-code-block__content(data-gh-embed="#{repo}/#{branch}/#{file}")

        footer.o-grid.u-text
            .o-block-small.u-flex-full #[+icon("github")] #[code=repo + '/' + (alt_file || file)]
            div
                +button(gh(repo, alt_file || file), false, "primary", "small") View on GitHub


//- Images / figures
    url - [string] url or path to image
    width - [integer] image width in px, for better rendering (default: 500)

@@ -168,10 +216,26 @@ mixin image-caption()
    block


//- Label
//- Graphic or illustration with button
    original - [string] Path to original image

mixin graphic(original)
    +image
        block
        if original
            .u-text-right
                +button(original, false, "secondary", "small") View large graphic


//- Labels

mixin label()
    .u-text-label.u-color-subtle&attributes(attributes)
    .u-text-label.u-color-dark&attributes(attributes)
        block


mixin label-inline()
    strong.u-text-label.u-color-dark&attributes(attributes)
        block


@@ -188,7 +252,9 @@ mixin tag()
mixin tag-model(...capabs)
    - var intro = "To use this functionality, spaCy needs a model to be installed"
    - var ext = capabs.length ? " that supports the following capabilities: " + capabs.join(', ') : ""
    +tag Requires model

    span.u-nowrap
        +tag Needs model
        +help(intro + ext + ".").u-color-theme


@@ -219,13 +285,7 @@ mixin list(type, start)

//- List item (only used within +list)

mixin item(procon)
    if procon
        li&attributes(attributes)
            +procon(procon).c-list__icon
            block

    else
mixin item()
    li.c-list__item&attributes(attributes)
        block


@@ -237,9 +297,9 @@ mixin table(head)
    table.c-table.o-block&attributes(attributes)

        if head
            +row
            +row("head")
                each column in head
                    th.c-table__head-cell.u-text-label=column
                    +head-cell=column

        block


@@ -251,10 +311,11 @@ mixin row(...style)
        block


//- Footer table row (only ued within +table)

mixin footrow()
    tr.c-table__row.c-table__row--foot&attributes(attributes)
//- Header table cell (only used within +row)

mixin head-cell()
    th.c-table__head-cell.u-text-label&attributes(attributes)
        block


@@ -285,70 +346,57 @@ mixin grid-col(width)

//- Card (only used within +grid)
    title - [string] card title
    details - [object] url, image, author, description, tags etc.
        (see /docs/usage/_data.json)

mixin card(title, details)
    +grid-col("half").o-card.u-text&attributes(attributes)
        if details.image
            +a(details.url).o-block-small
                img(src=details.image alt=title width="300" role="presentation")
    url - [string] link for card
    author - [string] optional author, displayed as byline at the bottom
    icon - [string] optional ID of icon displayed with card
    width - [string] optional width of grid column, defaults to "half"

mixin card(title, url, author, icon, width)
    +grid-col(width || "half").o-box.o-grid.o-grid--space.u-text&attributes(attributes)
        +a(url)
            h4.u-heading.u-text-label
                if icon
                    +icon(icon, 25).u-float-right
                if title
                    +a(details.url)
                        +h(3)=title
                    span.u-color-dark=title
            .o-block-small.u-text-small
                block
            if author
                .u-color-subtle.u-text-tiny by #{author}

        if details.author
            .u-text-small.u-color-subtle by #{details.author}

        if details.description || details.tags
            ul
                if details.description
                    li=details.description

                if details.tags
                    li
                        each tag in details.tags
                            span.u-text-tag #{tag}

//- Table of contents, to be used with +item mixins for links
    col - [string] width of column (see +grid-col)

mixin table-of-contents(col)
    +grid-col(col || "half")
        +infobox
            +label.o-block-small Table of contents
            +list("numbers").u-text-small.o-no-block
                block


//- Simpler card list item (only used within +list)
    title - [string] card title
    details - [object] url, image, author, description, tags etc.
        (see /docs/usage/_data.json)
//- Bibliography
    id - [string] ID of bibliography component, for anchor links. Can be used if
        there's more than one bibliography on one page.

mixin card-item(title, details)
    +item&attributes(attributes)
        +a(details.url)=title

        if details.description
            br
            span=details.description

        if details.author
            br
            span.u-text-small.u-color-subtle by #{details.author}
mixin bibliography(id)
    section(id=id || "bibliography")
        +infobox
            +label.o-block-small Bibliography
            +list("numbers").u-text-small.o-no-block
                block


//- Table row for models table
//- Footnote
    id - [string / integer] ID of footnote.
    bib_id - [string] ID of bibliography component, defaults to "bibliography".
    tooltip - [string] optional text displayed as tooltip

mixin model-row(name, lang, procon, size, license, default_model, divider)
    - var licenses = { "CC BY-SA": "https://creativecommons.org/licenses/by-sa/3.0/", "CC BY-NC": "https://creativecommons.org/licenses/by-nc/3.0/" }

    +row(divider ? "divider": null)
        +cell #[code=name]
            if default_model
                | #[span.u-color-theme(title="default model") #[+icon("star", 16)]]
        +cell=lang
        each icon in procon
            +cell.u-text-center #[+procon(icon ? "pro" : "con")]
        +cell.u-text-right=size
        +cell
            if license in licenses
                +a(licenses[license])=license
mixin fn(id, bib_id, tooltip)
    sup.u-padding-small(id="bib" + id data-tooltip=tooltip)
        span.u-text-tag
            +a("#" + (bib_id || "bibliography")).u-hide-link #{id}


//- Table rows for annotation specs

@@ -383,14 +431,3 @@ mixin annotation-row(annots, style)
        else
            +cell=cell
    block


//- Table of contents, to be used with +item mixins for links
    col - [string] width of column (see +grid-col)

mixin table-of-contents(col)
    +grid-col(col || "half")
        +infobox
            +label.o-block-small Table of contents
            +list("numbers").u-text-small.o-no-block
                block
@@ -1,19 +1,15 @@
//- 💫 INCLUDES > TOP NAVIGATION

include _mixins

nav.c-nav.u-text.js-nav(class=landing ? "c-nav--theme" : null)
    a(href='/') #[+logo]

    if SUBSECTION != "index"
        .u-text-label.u-padding-small.u-hidden-xs=SUBSECTION
    a(href="/" aria-label=SITENAME) #[+logo]

    ul.c-nav__menu
        - var NAV = ALPHA ? { "Usage": "/docs/usage", "Reference": "/docs/api" } : NAVIGATION

        each url, item in NAV
            li.c-nav__menu__item(class=(url == "/") ? "u-hidden-xs" : null)
        - var current_url = '/' + current.path[0]
        each url, item in NAVIGATION
            li.c-nav__menu__item(class=(current_url == url) ? "is-active" : null)
                +a(url)=item

        li.c-nav__menu__item
            +a(gh("spaCy"))(aria-label="GitHub").u-hidden-xs #[+icon("github", 20)]
        li.c-nav__menu__item.u-hidden-xs
            +a(gh("spaCy"))(aria-label="GitHub") #[+icon("github", 20)]

    progress.c-progress.js-progress(value="0" max="1")
@@ -1,6 +1,6 @@
//- 💫 INCLUDES > NEWSLETTER

ul.o-block
ul.o-block-small
    li.u-text-label.u-color-subtle Stay in the loop!
    li Receive updates about new releases, tutorials and more.

@@ -10,7 +10,6 @@ form.o-grid#mc-embedded-subscribe-form(action="//#{MAILCHIMP.user}.list-manage.c
    div(style="position: absolute; left: -5000px;" aria-hidden="true")
        input(type="text" name="b_#{MAILCHIMP.id}_#{MAILCHIMP.list}" tabindex="-1" value="")

    .o-grid-col.u-border.u-padding-small
        input#mce-EMAIL.u-text(type="email" name="EMAIL" placeholder="Your email")

        button#mc-embedded-subscribe.u-text-label.u-color-theme(type="submit" name="subscribe") Sign up
    .o-grid-col.o-grid.o-grid--nowrap.o-field.u-padding-small
        input#mce-EMAIL.o-field__input.u-text(type="email" name="EMAIL" placeholder="Your email" aria-label="Your email")
        button#mc-embedded-subscribe.o-field__button.u-text-label.u-color-theme.u-nowrap(type="submit" name="subscribe") Sign up
@@ -1,47 +1,56 @@
//- 💫 INCLUDES > DOCS PAGE TEMPLATE

- sidebar_content = (SUBSECTION != "index") ? public.docs[SUBSECTION]._data.sidebar : public.docs._data.sidebar || FOOTER
- sidebar_content = (public[SECTION] ? public[SECTION]._data.sidebar : public._data[SECTION] ? public._data[SECTION].sidebar : false) || FOOTER

include _sidebar

main.o-main.o-main--sidebar.o-main--aside
    article.o-content
        +grid.o-no-block
            +grid-col(source ? "two-thirds" : "full")
                +h(1)=title
                +h(1).u-heading--title=title.replace("'", "’")
                    if tag
                        +tag=tag
                    if tag_new
                        +tag-new(tag_new)

                if teaser
                    .u-heading__teaser.u-text-small.u-color-dark=teaser
                else if IS_MODELS
                    .u-heading__teaser.u-text-small.u-color-dark
                        | Available statistical models for
                        | #[code=current.source] (#{LANGUAGES[current.source]}).

            if source
                +grid-col("third").u-text-right
                    .o-inline-list
                        +button(gh("spacy", source), false, "secondary").u-text-tag Source #[+icon("code", 14)]
                    .o-block.u-text-right
                        +button(gh("spacy", source), false, "secondary", "small").u-nowrap
                            | Source #[+icon("code", 14)]

        //-if ALPHA
            //- +alpha-info

        if ALPHA
            +infobox("⚠️ You are viewing the spaCy v2.0.0 alpha docs")
                strong This page is part of the alpha documentation for spaCy v2.0.
                | It does not reflect the state of the latest stable release.
                | Because v2.0 is still under development, the implementation
                | may differ from the intended state described here. See the
                | #[+a(gh("spaCy") + "/releases/tag/v2.0.0-alpha") release notes]
                | for details on how to install and test the new version. To
                | read the official docs for spaCy v1.x,
                | #[+a("https://spacy.io/docs") go here].

        if IS_MODELS
            include _page_models
        else
            !=yield

    +grid.o-content.u-text
        +grid-col("half")
            if next && public.docs[SUBSECTION]._data[next]
                - data = public.docs[SUBSECTION]._data[next]

        if !IS_MODELS
            .o-inline-list
                span #[strong.u-text-label Read next:] #[+a(next).u-link=data.title]
                +button(gh("spacy", "website/" + current.path.join('/') + ".jade"), false, "secondary", "small")
                    | #[span.o-icon Suggest edits] #[+icon("code", 14)]

        +grid-col("half").u-text-right
            .o-inline-list
                +button(gh("spacy", "website/" + current.path.join('/') + ".jade"), false, "secondary").u-text-tag Suggest edits #[+icon("code", 14)]
        if next && public[SECTION]._data[next]
            - data = public[SECTION]._data[next]

            +grid("vcenter")
                +a(next).u-text-small.u-flex-full
                    h4.u-text-label.u-color-dark Read next
                    | #{data.title}

                +a(next).c-icon-button.c-icon-button--right(aria-hidden="true")
                    +icon("arrow-right", 24)

    +gitter("spaCy chat")
website/_includes/_page_models.jade (new file, 77 lines added)

@@ -0,0 +1,77 @@
//- 💫 INCLUDES > MODELS PAGE TEMPLATE

for id in CURRENT_MODELS
    +section(id)
        +grid("vcenter").o-no-block(id=id)
            +grid-col("two-thirds")
                +h(2)
                    +a("#" + id).u-permalink=id

            +grid-col("third").u-text-right
                .u-color-subtle.u-text-tiny
                    +button(gh("spacy-models") + "/releases", true, "secondary", "small")(data-tpl=id data-tpl-key="download")
                        | Release details
                    .u-padding-small Latest: #[code(data-tpl=id data-tpl-key="version") n/a]

        +aside-code("Installation", "bash", "$").
            spacy download #{id}

        - var comps = getModelComponents(id)

        p(data-tpl=id data-tpl-key="description")

        div(data-tpl=id data-tpl-key="error" style="display: none")
            +infobox
                | Unable to load model details from GitHub. To find out more
                | about this model, see the overview of the
                | #[+a(gh("spacy-models") + "/releases") latest model releases].

        +table(data-tpl=id data-tpl-key="table")
            +row
                +cell #[+label Language]
                +cell #[+tag=comps.lang] #{LANGUAGES[comps.lang]}
            for comp, label in {"Type": comps.type, "Genre": comps.genre}
                +row
                    +cell #[+label=label]
                    +cell #[+tag=comp] #{MODEL_META[comp]}
            +row
                +cell #[+label Size]
                +cell #[+tag=comps.size] #[span(data-tpl=id data-tpl-key="size") #[em n/a]]

            each label in ["Pipeline", "Sources", "Author", "License"]
                - var field = label.toLowerCase()
                +row
                    +cell.u-nowrap
                        +label=label
                        if MODEL_META[field]
                            | #[+help(MODEL_META[field]).u-color-subtle]
                    +cell
                        span(data-tpl=id data-tpl-key=field) #[em n/a]

            +row(data-tpl=id data-tpl-key="compat-wrapper" style="display: none")
                +cell
                    +label Compat #[+help("Latest compatible model version for your spaCy installation").u-color-subtle]
                +cell
                    .o-field.u-float-left
                        select.o-field__select.u-text-small(data-tpl=id data-tpl-key="compat")
                    .o-empty(data-tpl=id data-tpl-key="compat-versions")

        section(data-tpl=id data-tpl-key="accuracy-wrapper" style="display: none")
            +grid.o-no-block
                +grid-col("third")
                    +h(4) Accuracy
                    +table.o-block-small
                        for label, field in MODEL_ACCURACY
                            +row(style="display: none")
                                +cell.u-nowrap
                                    +label=label
                                    if MODEL_META[field]
                                        | #[+help(MODEL_META[field]).u-color-subtle]
                                +cell.u-text-right(data-tpl=id data-tpl-key=field)
                                    | n/a

                +grid-col("two-thirds")
                    +h(4) Comparison
                    +chart(id).u-padding-small

        p.u-text-small.u-color-dark(data-tpl=id data-tpl-key="notes")
@@ -1,27 +1,46 @@
//- 💫 INCLUDES > SCRIPTS

script(src="/assets/js/main.js?v#{V_JS}")
script(src="/assets/js/prism.js")
if quickstart
    script(src="/assets/js/quickstart.min.js")

if SECTION == "docs"
    if quickstart
        script(src="/assets/js/quickstart.js")
        script var qs = new Quickstart("#qs")
if IS_PAGE
    script(src="/assets/js/in-view.min.js")

    script.
        ((window.gitter = {}).chat = {}).options = {
            useStyles: false,
            activationElement: '.js-gitter-button',
            targetElement: '.js-gitter',
            room: '!{SOCIAL.gitter}'
        };

    script(src="https://sidecar.gitter.im/dist/sidecar.v1.js" async defer)
if HAS_MODELS
    script(src="/assets/js/chart.min.js")

if environment == "deploy"
    script
    script(async src="https://www.google-analytics.com/analytics.js")

script(src="/assets/js/prism.min.js")
script(src="/assets/js/main.js?v#{V_JS}")

script
    | new ProgressBar('.js-progress');

    if changelog
        | new Changelog('!{SOCIAL.github}', 'spacy');

    if quickstart
        | new Quickstart("#qs");

    if IS_PAGE
        | new SectionHighlighter('data-section', 'data-nav');
        | new GitHubEmbed('!{SOCIAL.github}', 'data-gh-embed');
        | ((window.gitter = {}).chat = {}).options = {
        |     useStyles: false,
        |     activationElement: '.js-gitter-button',
        |     targetElement: '.js-gitter',
        |     room: '!{SOCIAL.gitter}'
        | };

    if HAS_MODELS
        | new ModelLoader('!{MODELS_REPO}', !{JSON.stringify(CURRENT_MODELS)}, !{JSON.stringify(MODEL_LICENSES)}, !{JSON.stringify(MODEL_ACCURACY)});

    if environment == "deploy"
        | window.ga=window.ga||function(){
        | (ga.q=ga.q||[]).push(arguments)}; ga.l=+new Date;
        | ga('create', '#{ANALYTICS}', 'auto'); ga('send', 'pageview');

        script(async src="https://www.google-analytics.com/analytics.js")
if IS_PAGE
    script(src="https://sidecar.gitter.im/dist/sidecar.v1.js" async defer)
@@ -1,13 +1,23 @@
//- 💫 INCLUDES > SIDEBAR

include _mixins

menu.c-sidebar.js-sidebar.u-text
    if sidebar_content
        each items, menu in sidebar_content
            ul.c-sidebar__section.o-block
                li.u-text-label.u-color-subtle=menu
        each items, sectiontitle in sidebar_content
            ul.c-sidebar__section.o-block-small
                li.u-text-label.u-color-dark=sectiontitle

                each url, item in items
                    li(class=(CURRENT == url || (CURRENT == "index" && url == "./")) ? "is-active" : null)
                        +a(url)=item
                    - var is_current = CURRENT == url || (CURRENT == "index" && url == "./")
                    li.c-sidebar__item
                        +a(url)(class=is_current ? "is-active" : null)=item

                        if is_current
                            if IS_MODELS && CURRENT_MODELS.length
                                - menu = Object.assign({}, ...CURRENT_MODELS.map(id => ({ [id]: id })))
                            if menu
                                ul.c-sidebar__crumb.u-hidden-sm
                                    - var counter = 0
                                    for id, title in menu
                                        - counter++
                                        li.c-sidebar__crumb__item(data-nav=id class=(counter == 1) ? "is-active" : null)
                                            +a("#section-" + id)=title
157
website/_includes/_svg.jade
Normal file
File diff suppressed because one or more lines are too long
|
@ -2,11 +2,16 @@
|
|||
|
||||
include _includes/_mixins
|
||||
|
||||
- title = IS_MODELS ? LANGUAGES[current.source] || title : title
|
||||
- social_title = (SECTION == "index") ? SITENAME + " - " + SLOGAN : title + " - " + SITENAME
|
||||
- social_img = SITE_URL + "/assets/img/social/preview_" + (preview || ALPHA ? "alpha" : "default") + ".jpg"
|
||||
|
||||
doctype html
|
||||
html(lang="en")
|
||||
title
|
||||
if SECTION == "docs" && SUBSECTION && SUBSECTION != "index"
|
||||
| #{title} | #{SITENAME} #{SUBSECTION == "api" ? "API" : "Usage"} Documentation
|
||||
if SECTION == "api" || SECTION == "usage" || SECTION == "models"
|
||||
- var title_section = (SECTION == "api") ? "API" : SECTION.charAt(0).toUpperCase() + SECTION.slice(1)
|
||||
| #{title} | #{SITENAME} #{title_section} Documentation
|
||||
|
||||
else if SECTION != "index"
|
||||
| #{title} | #{SITENAME}
|
||||
|
@ -22,32 +27,30 @@ html(lang="en")
|
|||
meta(property="og:type" content="website")
|
||||
meta(property="og:site_name" content=sitename)
|
||||
meta(property="og:url" content="#{SITE_URL}/#{current.path.join('/')}")
|
||||
meta(property="og:title" content="#{title} - spaCy")
|
||||
meta(property="og:title" content=social_title)
|
||||
meta(property="og:description" content=description)
|
||||
meta(property="og:image" content=getSocialImg())
|
||||
meta(property="og:image" content=social_img)
|
||||
|
||||
meta(name="twitter:card" content="summary_large_image")
|
||||
meta(name="twitter:site" content="@" + SOCIAL.twitter)
|
||||
meta(name="twitter:title" content="#{title} - spaCy")
|
||||
meta(name="twitter:title" content=social_title)
|
||||
meta(name="twitter:description" content=description)
|
||||
meta(name="twitter:image" content=getSocialImg())
|
||||
meta(name="twitter:image" content=social_img)
|
||||
|
||||
link(rel="shortcut icon" href="/assets/img/favicon.ico")
|
||||
link(rel="icon" type="image/x-icon" href="/assets/img/favicon.ico")
|
||||
|
||||
if ALPHA && SECTION == "docs"
|
||||
if SECTION == "api"
|
||||
link(href="/assets/css/style_green.css?v#{V_CSS}" rel="stylesheet")
|
||||
|
||||
else if SUBSECTION == "usage"
|
||||
link(href="/assets/css/style_red.css?v#{V_CSS}" rel="stylesheet")
|
||||
|
||||
else
|
||||
link(href="/assets/css/style.css?v#{V_CSS}" rel="stylesheet")
|
||||
|
||||
body
|
||||
include _includes/_svg
|
||||
include _includes/_navigation
|
||||
|
||||
if SECTION == "docs"
|
||||
if !landing
|
||||
include _includes/_page-docs
|
||||
|
||||
else
|
||||
|
|
43
website/api/_annotation/_biluo.jade
Normal file
|
@ -0,0 +1,43 @@
|
|||
//- 💫 DOCS > API > ANNOTATION > BILUO
|
||||
|
||||
+table([ "Tag", "Description" ])
|
||||
+row
|
||||
+cell #[code #[span.u-color-theme B] EGIN]
|
||||
+cell The first token of a multi-token entity.
|
||||
|
||||
+row
|
||||
+cell #[code #[span.u-color-theme I] N]
|
||||
+cell An inner token of a multi-token entity.
|
||||
|
||||
+row
|
||||
+cell #[code #[span.u-color-theme L] AST]
|
||||
+cell The final token of a multi-token entity.
|
||||
|
||||
+row
|
||||
+cell #[code #[span.u-color-theme U] NIT]
|
||||
+cell A single-token entity.
|
||||
|
||||
+row
|
||||
+cell #[code #[span.u-color-theme O] UT]
|
||||
+cell A non-entity token.
|
||||
|
||||
+aside("Why BILUO, not IOB?")
|
||||
| There are several coding schemes for encoding entity annotations as
|
||||
| token tags. These coding schemes are equally expressive, but not
|
||||
| necessarily equally learnable.
|
||||
| #[+a("http://www.aclweb.org/anthology/W09-1119") Ratinov and Roth]
|
||||
| showed that the minimal #[strong Begin], #[strong In], #[strong Out]
|
||||
| scheme was more difficult to learn than the #[strong BILUO] scheme that
|
||||
| we use, which explicitly marks boundary tokens.
|
||||
|
||||
p
|
||||
| spaCy translates the character offsets into this scheme, in order to
|
||||
| decide the cost of each action given the current state of the entity
|
||||
| recogniser. The costs are then used to calculate the gradient of the
|
||||
| loss, to train the model. The exact algorithm is a pastiche of
|
||||
| well-known methods, and is not currently described in any single
|
||||
| publication. The model is a greedy transition-based parser guided by a
|
||||
| linear model whose weights are learned using the averaged perceptron
|
||||
| loss, via the #[+a("http://www.aclweb.org/anthology/C12-1059") dynamic oracle]
|
||||
| imitation learning strategy. The transition system is equivalent to the
|
||||
| BILUO tagging scheme.
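p
    | The translation itself is easy to sketch in pure Python. The snippet
    | below is a simplified illustration rather than spaCy's internal
    | implementation, and it assumes the entity offsets line up exactly with
    | token boundaries:

+code.
    def biluo_tags(offsets, entities):
        # offsets: (start, end) character span of each token
        # entities: (start, end, label) character spans of the gold entities
        tags = ['O'] * len(offsets)
        for ent_start, ent_end, label in entities:
            covered = [i for i, (s, e) in enumerate(offsets)
                       if s >= ent_start and e <= ent_end]
            if len(covered) == 1:
                tags[covered[0]] = 'U-' + label
            elif covered:
                tags[covered[0]] = 'B-' + label
                for i in covered[1:-1]:
                    tags[i] = 'I-' + label
                tags[covered[-1]] = 'L-' + label
        return tags

    # "New York is big": "New York" is a two-token GPE entity
    offsets = [(0, 3), (4, 8), (9, 11), (12, 15)]
    print(biluo_tags(offsets, [(0, 8, 'GPE')]))
    # ['B-GPE', 'L-GPE', 'O', 'O']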
|
115
website/api/_architecture/_cython.jade
Normal file
|
@ -0,0 +1,115 @@
|
|||
//- 💫 DOCS > API > ARCHITECTURE > CYTHON
|
||||
|
||||
+aside("What's Cython?")
|
||||
| #[+a("http://cython.org/") Cython] is a language for writing
|
||||
| C extensions for Python. Most Python code is also valid Cython, but
|
||||
| you can add type declarations to get efficient memory-managed code
|
||||
| just like C or C++.
|
||||
|
||||
p
|
||||
| spaCy's core data structures are implemented as
|
||||
| #[+a("http://cython.org/") Cython] #[code cdef] classes. Memory is
|
||||
| managed through the #[+a(gh("cymem")) #[code cymem]]
|
||||
| #[code cymem.Pool] class, which allows you
|
||||
| to allocate memory which will be freed when the #[code Pool] object
|
||||
| is garbage collected. This means you usually don't have to worry
|
||||
| about freeing memory. You just have to decide which Python object
|
||||
| owns the memory, and make it own the #[code Pool]. When that object
|
||||
| goes out of scope, the memory will be freed. You do have to take
|
||||
| care that no pointers outlive the object that owns them — but this
|
||||
| is generally quite easy.
|
||||
|
||||
p
|
||||
| All Cython modules should have the #[code # cython: infer_types=True]
|
||||
| compiler directive at the top of the file. This makes the code much
|
||||
| cleaner, as it avoids the need for many type declarations. If
|
||||
| possible, you should prefer to declare your functions #[code nogil],
|
||||
| even if you don't especially care about multi-threading. The reason
|
||||
| is that #[code nogil] functions help the Cython compiler reason about
|
||||
| your code quite a lot — you're telling the compiler that no Python
|
||||
| dynamics are possible. This lets the compiler catch many more errors, and ensures
|
||||
| your function will run at C speed.
|
||||
|
||||
|
||||
p
|
||||
| Cython gives you many choices of sequences: you could have a Python
|
||||
| list, a numpy array, a memory view, a C++ vector, or a pointer.
|
||||
| Pointers are preferred, because they are fastest, have the most
|
||||
| explicit semantics, and let the compiler check your code more
|
||||
| strictly. C++ vectors are also great — but you should only use them
|
||||
| internally in functions. It's less friendly to accept a vector as an
|
||||
| argument, because that asks the user to do much more work. Here's
|
||||
| how to get a pointer from a numpy array, memory view or vector:
|
||||
|
||||
+code.
|
||||
cdef void get_pointers(np.ndarray[int, mode='c'] numpy_array, vector[int] cpp_vector, int[::1] memory_view) nogil:
    pointer1 = <int*>numpy_array.data
    pointer2 = cpp_vector.data()
    pointer3 = &memory_view[0]
|
||||
|
||||
p
|
||||
| Both C arrays and C++ vectors reassure the compiler that no Python
|
||||
| operations are possible on your variable. This is a big advantage:
|
||||
| it lets the Cython compiler raise many more errors for you.
|
||||
|
||||
p
|
||||
| When getting a pointer from a numpy array or memoryview, take care
|
||||
| that the data is actually stored in C-contiguous order — otherwise
|
||||
| you'll get a pointer to nonsense. The type-declarations in the code
|
||||
| above should generate runtime errors if buffers with incorrect
|
||||
| memory layouts are passed in. To iterate over the array, the
|
||||
| following style is preferred:
|
||||
|
||||
+code.
|
||||
cdef int c_total(const int* int_array, int length) nogil:
    total = 0
    for item in int_array[:length]:
        total += item
    return total
|
||||
|
||||
p
|
||||
| If this is confusing, consider that the compiler couldn't deal with
|
||||
| #[code for item in int_array:] — there's no length attached to a raw
|
||||
| pointer, so how could we figure out where to stop? The length is
|
||||
| provided in the slice notation as a solution to this. Note that we
|
||||
| don't have to declare the type of #[code item] in the code above —
|
||||
| the compiler can easily infer it. This gives us tidy code that looks
|
||||
| quite like Python, but is exactly as fast as C — because we've made
|
||||
| sure the compilation to C is trivial.
|
||||
|
||||
p
|
||||
| Your functions cannot be declared #[code nogil] if they need to
|
||||
| create Python objects or call Python functions. This is perfectly
|
||||
| okay — you shouldn't torture your code just to get #[code nogil]
|
||||
| functions. However, if your function isn't #[code nogil], you should
|
||||
| compile your module with #[code cython -a --cplus my_module.pyx] and
|
||||
| open the resulting #[code my_module.html] file in a browser. This
|
||||
| will let you see how Cython is compiling your code. Calls into the
|
||||
| Python run-time will be in bright yellow. This lets you easily see
|
||||
| whether Cython is able to correctly type your code, or whether there
|
||||
| are unexpected problems.
|
||||
|
||||
p
|
||||
| Working in Cython is very rewarding once you're over the initial
|
||||
| learning curve. As with C and C++, the first way you write something
|
||||
| in Cython will often be the performance-optimal approach. In
|
||||
| contrast, Python optimisation generally requires a lot of
|
||||
| experimentation. Is it faster to have an #[code if item in my_dict]
|
||||
| check, or to use #[code .get()]? What about
|
||||
| #[code try]/#[code except]? Does this numpy operation create a copy?
|
||||
| There's no way to guess the answers to these questions, and you'll
|
||||
| usually be dissatisfied with your results — so there's no way to
|
||||
| know when to stop this process. In the worst case, you'll make a
|
||||
| mess that invites the next reader to try their luck too. This is
|
||||
| like one of those
|
||||
| #[+a("http://www.wemjournal.org/article/S1080-6032%2809%2970088-2/abstract") volcanic gas-traps],
|
||||
| where the rescuers keep passing out from low oxygen, causing
|
||||
| another rescuer to follow — only to succumb themselves. In short,
|
||||
| just say no to optimizing your Python. If it's not fast enough the
|
||||
| first time, just switch to Cython.
|
||||
|
||||
+infobox("Resources")
|
||||
+list.o-no-block
|
||||
+item #[+a("http://docs.cython.org/en/latest/") Official Cython documentation] (cython.org)
|
||||
+item #[+a("https://explosion.ai/blog/writing-c-in-cython", true) Writing C in Cython] (explosion.ai)
|
||||
+item #[+a("https://explosion.ai/blog/multithreading-with-cython") Multi-threading spaCy’s parser and named entity recogniser] (explosion.ai)
|
141
website/api/_architecture/_nn-model.jade
Normal file
|
@ -0,0 +1,141 @@
|
|||
//- 💫 DOCS > API > ARCHITECTURE > NN MODEL ARCHITECTURE
|
||||
|
||||
p
|
||||
| The parsing model is a blend of recent results. The two recent
|
||||
| inspirations have been the work of Eliyahu Kiperwasser and Yoav Goldberg at
|
||||
| Bar Ilan#[+fn(1)], and the SyntaxNet team from Google. The foundation of
|
||||
| the parser is still based on the work of Joakim Nivre#[+fn(2)], who
|
||||
| introduced the transition-based framework#[+fn(3)], the arc-eager
|
||||
| transition system, and the imitation learning objective. The model is
|
||||
| implemented using #[+a(gh("thinc")) Thinc], spaCy's machine learning
|
||||
| library. We first predict context-sensitive vectors for each word in the
|
||||
| input:
|
||||
|
||||
+code.
|
||||
(embed_lower | embed_prefix | embed_suffix | embed_shape)
|
||||
>> Maxout(token_width)
|
||||
>> convolution ** 4
|
||||
|
||||
p
|
||||
| This convolutional layer is shared between the tagger, parser and NER,
|
||||
| and will also be shared by the future neural lemmatizer. Because the
|
||||
| parser shares these layers with the tagger, the parser does not require
|
||||
| tag features. I got this trick from David Weiss's "Stack-propagation"
|
||||
| paper#[+fn(4)].
|
||||
|
||||
p
|
||||
| To boost the representation, the tagger actually predicts a "super tag"
|
||||
| with POS, morphology and dependency label#[+fn(5)]. The tagger predicts
|
||||
| these supertags by adding a softmax layer onto the convolutional layer –
|
||||
| so, we're teaching the convolutional layer to give us a representation
|
||||
| that's one affine transform from this informative lexical information.
|
||||
| This is obviously good for the parser (which backprops to the
|
||||
| convolutions too). The parser model makes a state vector by concatenating
|
||||
| the vector representations for its context tokens. The current context
|
||||
| tokens:
|
||||
|
||||
+table
|
||||
+row
|
||||
+cell #[code S0], #[code S1], #[code S2]
|
||||
+cell Top three words on the stack.
|
||||
|
||||
+row
|
||||
+cell #[code B0], #[code B1]
|
||||
+cell First two words of the buffer.
|
||||
|
||||
+row
|
||||
+cell.u-nowrap
|
||||
| #[code S0L1], #[code S1L1], #[code S2L1], #[code B0L1],
|
||||
| #[code B1L1]#[br]
|
||||
| #[code S0L2], #[code S1L2], #[code S2L2], #[code B0L2],
|
||||
| #[code B1L2]
|
||||
+cell
|
||||
| Leftmost and second leftmost children of #[code S0], #[code S1],
|
||||
| #[code S2], #[code B0] and #[code B1].
|
||||
|
||||
+row
|
||||
+cell.u-nowrap
|
||||
| #[code S0R1], #[code S1R1], #[code S2R1], #[code B0R1],
|
||||
| #[code B1R1]#[br]
|
||||
| #[code S0R2], #[code S1R2], #[code S2R2], #[code B0R2],
|
||||
| #[code B1R2]
|
||||
+cell
|
||||
| Rightmost and second rightmost children of #[code S0], #[code S1],
|
||||
| #[code S2], #[code B0] and #[code B1].
|
||||
|
||||
p
|
||||
| This makes the state vector quite long: #[code 13*T], where #[code T] is
|
||||
| the token vector width (128 is working well). Fortunately, there's a way
|
||||
| to structure the computation to save some expense (and make it more
|
||||
| GPU-friendly).
|
||||
|
||||
p
|
||||
| The parser typically visits #[code 2*N] states for a sentence of length
|
||||
| #[code N] (although it may visit more, if it back-tracks with a
|
||||
| non-monotonic transition#[+fn(6)]). A naive implementation would require
|
||||
| #[code 2*N (B, 13*T) @ (13*T, H)] matrix multiplications for a batch of
|
||||
| size #[code B]. We can instead perform one #[code (B*N, T) @ (T, 13*H)]
|
||||
| multiplication, to pre-compute the hidden weights for each positional
|
||||
| feature with respect to the words in the batch. (Note that our token
|
||||
| vectors come from the CNN — so we can't play this trick over the
|
||||
| vocabulary. That's how Stanford's NN parser#[+fn(7)] works — and why its
|
||||
| model is so big.)
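p
    | As a rough numpy sketch of the pre-computation trick (the names
    | #[code W], #[code token_vectors] and #[code feat_ids] are invented for
    | illustration, not spaCy internals):

+code.
    import numpy as np

    B_N, T, H, F = 1000, 128, 64, 13           # tokens in batch, widths, feature slots
    token_vectors = np.random.randn(B_N, T)    # output of the shared CNN
    W = np.random.randn(T, F * H)

    # One big multiplication: each token's contribution to the hidden layer,
    # once per positional feature slot.
    precomputed = token_vectors.dot(W).reshape((B_N, F, H))

    def state_vector(feat_ids, bias):
        # feat_ids: for each of the 13 feature slots, the index of the token
        # currently filling that slot. Summing the pre-computed rows is
        # equivalent to concatenating the 13 token vectors and multiplying
        # by a (13*T, H) weight matrix.
        return precomputed[feat_ids, np.arange(F)].sum(axis=0) + bias

    hidden = state_vector(np.random.randint(0, B_N, size=F), np.zeros(H))
    assert hidden.shape == (H,)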
|
||||
|
||||
p
|
||||
| This pre-computation strategy allows a nice compromise between
|
||||
| GPU-friendliness and implementation simplicity. The CNN and the wide
|
||||
| lower layer are computed on the GPU, and then the precomputed hidden
|
||||
| weights are moved to the CPU, before we start the transition-based
|
||||
| parsing process. This makes a lot of things much easier. We don't have to
|
||||
| worry about variable-length batch sizes, and we don't have to implement
|
||||
| the dynamic oracle in CUDA to train.
|
||||
|
||||
p
|
||||
| Currently the parser's loss function is multilabel log loss#[+fn(6)], as
|
||||
| the dynamic oracle allows multiple states to be 0 cost. This is defined
|
||||
| as follows, where #[code gZ] is the sum of the scores assigned to gold
|
||||
| classes:
|
||||
|
||||
+code.
|
||||
(exp(score) / Z) - (exp(score) / gZ)
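p
    | A rough numpy sketch of that gradient; we assume here that non-gold
    | classes receive only the #[code exp(score) / Z] term, which is our
    | reading of the loss rather than a statement about the exact
    | implementation:

+code.
    import numpy as np

    scores = np.array([2.0, 1.0, 0.5, -1.0])   # one state's action scores
    is_gold = np.array([1., 1., 0., 0.])       # zero-cost (gold) actions

    exps = np.exp(scores)
    Z = exps.sum()                  # partition over all classes
    gZ = (exps * is_gold).sum()     # partition over the gold classes

    d_scores = exps / Z - (exps * is_gold) / gZ
    assert abs(d_scores.sum()) < 1e-8   # the gradient sums to zero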
|
||||
|
||||
+bibliography
|
||||
+item
|
||||
| #[+a("https://www.semanticscholar.org/paper/Simple-and-Accurate-Dependency-Parsing-Using-Bidir-Kiperwasser-Goldberg/3cf31ecb2724b5088783d7c96a5fc0d5604cbf41") Simple and Accurate Dependency Parsing Using Bidirectional LSTM Feature Representations]
|
||||
br
|
||||
| Eliyahu Kiperwasser, Yoav Goldberg. (2016)
|
||||
|
||||
+item
|
||||
| #[+a("https://www.semanticscholar.org/paper/A-Dynamic-Oracle-for-Arc-Eager-Dependency-Parsing-Goldberg-Nivre/22697256ec19ecc3e14fcfc63624a44cf9c22df4") A Dynamic Oracle for Arc-Eager Dependency Parsing]
|
||||
br
|
||||
| Yoav Goldberg, Joakim Nivre (2012)
|
||||
|
||||
+item
|
||||
| #[+a("https://explosion.ai/blog/parsing-english-in-python") Parsing English in 500 Lines of Python]
|
||||
br
|
||||
| Matthew Honnibal (2013)
|
||||
|
||||
+item
|
||||
| #[+a("https://www.semanticscholar.org/paper/Stack-propagation-Improved-Representation-Learning-Zhang-Weiss/0c133f79b23e8c680891d2e49a66f0e3d37f1466") Stack-propagation: Improved Representation Learning for Syntax]
|
||||
br
|
||||
| Yuan Zhang, David Weiss (2016)
|
||||
|
||||
+item
|
||||
| #[+a("https://www.semanticscholar.org/paper/Deep-multi-task-learning-with-low-level-tasks-supe-S%C3%B8gaard-Goldberg/03ad06583c9721855ccd82c3d969a01360218d86") Deep multi-task learning with low level tasks supervised at lower layers]
|
||||
br
|
||||
| Anders Søgaard, Yoav Goldberg (2016)
|
||||
|
||||
+item
|
||||
| #[+a("https://www.semanticscholar.org/paper/An-Improved-Non-monotonic-Transition-System-for-De-Honnibal-Johnson/4094cee47ade13b77b5ab4d2e6cb9dd2b8a2917c") An Improved Non-monotonic Transition System for Dependency Parsing]
|
||||
br
|
||||
| Matthew Honnibal, Mark Johnson (2015)
|
||||
|
||||
+item
|
||||
| #[+a("http://cs.stanford.edu/people/danqi/papers/emnlp2014.pdf") A Fast and Accurate Dependency Parser using Neural Networks]
|
||||
br
|
||||
| Danqi Chen, Christopher D. Manning (2014)
|
||||
|
||||
+item
|
||||
| #[+a("https://www.semanticscholar.org/paper/Parsing-the-Wall-Street-Journal-using-a-Lexical-Fu-Riezler-King/0ad07862a91cd59b7eb5de38267e47725a62b8b2") Parsing the Wall Street Journal using a Lexical-Functional Grammar and Discriminative Estimation Techniques]
|
||||
br
|
||||
| Stefan Riezler et al. (2002)
|
|
@ -1,29 +1,32 @@
|
|||
{
|
||||
"sidebar": {
|
||||
"Introduction": {
|
||||
"Facts & Figures": "./",
|
||||
"Languages": "language-models",
|
||||
"Annotation Specs": "annotation"
|
||||
"Overview": {
|
||||
"Architecture": "./",
|
||||
"Annotation Specs": "annotation",
|
||||
"Functions": "top-level"
|
||||
},
|
||||
"Top-level": {
|
||||
"spacy": "spacy",
|
||||
"displacy": "displacy",
|
||||
"Utility Functions": "util",
|
||||
"Command line": "cli"
|
||||
},
|
||||
"Classes": {
|
||||
"Containers": {
|
||||
"Doc": "doc",
|
||||
"Token": "token",
|
||||
"Span": "span",
|
||||
"Lexeme": "lexeme"
|
||||
},
|
||||
|
||||
"Pipeline": {
|
||||
"Language": "language",
|
||||
"Tokenizer": "tokenizer",
|
||||
"Pipe": "pipe",
|
||||
"Tensorizer": "tensorizer",
|
||||
"Tagger": "tagger",
|
||||
"DependencyParser": "dependencyparser",
|
||||
"EntityRecognizer": "entityrecognizer",
|
||||
"TextCategorizer": "textcategorizer",
|
||||
"Tokenizer": "tokenizer",
|
||||
"Lemmatizer": "lemmatizer",
|
||||
"Matcher": "matcher",
|
||||
"Lexeme": "lexeme",
|
||||
"PhraseMatcher": "phrasematcher"
|
||||
},
|
||||
|
||||
"Other": {
|
||||
"Vocab": "vocab",
|
||||
"StringStore": "stringstore",
|
||||
"Vectors": "vectors",
|
||||
|
@ -34,52 +37,37 @@
|
|||
},
|
||||
|
||||
"index": {
|
||||
"title": "Facts & Figures",
|
||||
"next": "language-models"
|
||||
"title": "Architecture",
|
||||
"next": "annotation",
|
||||
"menu": {
|
||||
"Basics": "basics",
|
||||
"Neural Network Model": "nn-model",
|
||||
"Cython Conventions": "cython"
|
||||
}
|
||||
},
|
||||
|
||||
"language-models": {
|
||||
"title": "Languages",
|
||||
"next": "philosophy"
|
||||
},
|
||||
|
||||
"philosophy": {
|
||||
"title": "Philosophy"
|
||||
},
|
||||
|
||||
"spacy": {
|
||||
"title": "spaCy top-level functions",
|
||||
"source": "spacy/__init__.py",
|
||||
"next": "displacy"
|
||||
},
|
||||
|
||||
"displacy": {
|
||||
"title": "displaCy",
|
||||
"tag": "module",
|
||||
"source": "spacy/displacy",
|
||||
"next": "util"
|
||||
},
|
||||
|
||||
"util": {
|
||||
"title": "Utility Functions",
|
||||
"source": "spacy/util.py",
|
||||
"next": "cli"
|
||||
},
|
||||
|
||||
"cli": {
|
||||
"title": "Command Line Interface",
|
||||
"source": "spacy/cli"
|
||||
"top-level": {
|
||||
"title": "Top-level Functions",
|
||||
"menu": {
|
||||
"spacy": "spacy",
|
||||
"displacy": "displacy",
|
||||
"Utility Functions": "util",
|
||||
"Compatibility": "compat",
|
||||
"Command Line": "cli"
|
||||
}
|
||||
},
|
||||
|
||||
"language": {
|
||||
"title": "Language",
|
||||
"tag": "class",
|
||||
"teaser": "A text-processing pipeline.",
|
||||
"source": "spacy/language.py"
|
||||
},
|
||||
|
||||
"doc": {
|
||||
"title": "Doc",
|
||||
"tag": "class",
|
||||
"teaser": "A container for accessing linguistic annotations.",
|
||||
"source": "spacy/tokens/doc.pyx"
|
||||
},
|
||||
|
||||
|
@ -103,6 +91,7 @@
|
|||
|
||||
"vocab": {
|
||||
"title": "Vocab",
|
||||
"teaser": "A storage class for vocabulary and other data shared across a language.",
|
||||
"tag": "class",
|
||||
"source": "spacy/vocab.pyx"
|
||||
},
|
||||
|
@ -115,10 +104,27 @@
|
|||
|
||||
"matcher": {
|
||||
"title": "Matcher",
|
||||
"teaser": "Match sequences of tokens, based on pattern rules.",
|
||||
"tag": "class",
|
||||
"source": "spacy/matcher.pyx"
|
||||
},
|
||||
|
||||
"phrasematcher": {
|
||||
"title": "PhraseMatcher",
|
||||
"teaser": "Match sequences of tokens, based on documents.",
|
||||
"tag": "class",
|
||||
"tag_new": 2,
|
||||
"source": "spacy/matcher.pyx"
|
||||
},
|
||||
|
||||
"pipe": {
|
||||
"title": "Pipe",
|
||||
"teaser": "Abstract base class defining the API for pipeline components.",
|
||||
"tag": "class",
|
||||
"tag_new": 2,
|
||||
"source": "spacy/pipeline.pyx"
|
||||
},
|
||||
|
||||
"dependenyparser": {
|
||||
"title": "DependencyParser",
|
||||
"tag": "class",
|
||||
|
@ -127,18 +133,22 @@
|
|||
|
||||
"entityrecognizer": {
|
||||
"title": "EntityRecognizer",
|
||||
"teaser": "Annotate named entities on documents.",
|
||||
"tag": "class",
|
||||
"source": "spacy/pipeline.pyx"
|
||||
},
|
||||
|
||||
"textcategorizer": {
|
||||
"title": "TextCategorizer",
|
||||
"teaser": "Add text categorization models to spaCy pipelines.",
|
||||
"tag": "class",
|
||||
"tag_new": 2,
|
||||
"source": "spacy/pipeline.pyx"
|
||||
},
|
||||
|
||||
"dependencyparser": {
|
||||
"title": "DependencyParser",
|
||||
"teaser": "Annotate syntactic dependencies on documents.",
|
||||
"tag": "class",
|
||||
"source": "spacy/pipeline.pyx"
|
||||
},
|
||||
|
@ -149,15 +159,23 @@
|
|||
"source": "spacy/tokenizer.pyx"
|
||||
},
|
||||
|
||||
"lemmatizer": {
|
||||
"title": "Lemmatizer",
|
||||
"tag": "class"
|
||||
},
|
||||
|
||||
"tagger": {
|
||||
"title": "Tagger",
|
||||
"teaser": "Annotate part-of-speech tags on documents.",
|
||||
"tag": "class",
|
||||
"source": "spacy/pipeline.pyx"
|
||||
},
|
||||
|
||||
"tensorizer": {
|
||||
"title": "Tensorizer",
|
||||
"teaser": "Add a tensor with position-sensitive meaning representations to a document.",
|
||||
"tag": "class",
|
||||
"tag_new": 2,
|
||||
"source": "spacy/pipeline.pyx"
|
||||
},
|
||||
|
||||
|
@ -169,23 +187,38 @@
|
|||
|
||||
"goldcorpus": {
|
||||
"title": "GoldCorpus",
|
||||
"teaser": "An annotated corpus, using the JSON file format.",
|
||||
"tag": "class",
|
||||
"tag_new": 2,
|
||||
"source": "spacy/gold.pyx"
|
||||
},
|
||||
|
||||
"binder": {
|
||||
"title": "Binder",
|
||||
"tag": "class",
|
||||
"tag_new": 2,
|
||||
"source": "spacy/tokens/binder.pyx"
|
||||
},
|
||||
|
||||
"vectors": {
|
||||
"title": "Vectors",
|
||||
"teaser": "Store, save and load word vectors.",
|
||||
"tag": "class",
|
||||
"tag_new": 2,
|
||||
"source": "spacy/vectors.pyx"
|
||||
},
|
||||
|
||||
"annotation": {
|
||||
"title": "Annotation Specifications"
|
||||
"title": "Annotation Specifications",
|
||||
"teaser": "Schemes used for labels, tags and training data.",
|
||||
"menu": {
|
||||
"Tokenization": "tokenization",
|
||||
"Sentence Boundaries": "sbd",
|
||||
"POS Tagging": "pos-tagging",
|
||||
"Lemmatization": "lemmatization",
|
||||
"Dependencies": "dependency-parsing",
|
||||
"Named Entities": "named-entities",
|
||||
"Training Data": "training"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,26 +1,17 @@
|
|||
//- 💫 DOCS > USAGE > COMMAND LINE INTERFACE
|
||||
|
||||
include ../../_includes/_mixins
|
||||
//- 💫 DOCS > API > TOP-LEVEL > COMMAND LINE INTERFACE
|
||||
|
||||
p
|
||||
| As of v1.7.0, spaCy comes with new command line helpers to download and
|
||||
| link models and show useful debugging information. For a list of available
|
||||
| commands, type #[code spacy --help].
|
||||
|
||||
+infobox("⚠️ Deprecation note")
|
||||
| As of spaCy 2.0, the #[code model] command to initialise a model data
|
||||
| directory is deprecated. The command was only necessary because previous
|
||||
| versions of spaCy expected a model directory to already be set up. This
|
||||
| has since been changed, so you can use the #[+api("cli#train") #[code train]]
|
||||
| command straight away.
|
||||
|
||||
+h(2, "download") Download
|
||||
+h(3, "download") Download
|
||||
|
||||
p
|
||||
| Download #[+a("/docs/usage/models") models] for spaCy. The downloader finds the
|
||||
| Download #[+a("/usage/models") models] for spaCy. The downloader finds the
|
||||
| best-matching compatible version, uses pip to download the model as a
|
||||
| package and automatically creates a
|
||||
| #[+a("/docs/usage/models#usage") shortcut link] to load the model by name.
|
||||
| #[+a("/usage/models#usage") shortcut link] to load the model by name.
|
||||
| Direct downloads don't perform any compatibility checks and require the
|
||||
| model name to be specified with its version (e.g., #[code en_core_web_sm-1.2.0]).
|
||||
|
||||
|
@ -49,15 +40,15 @@ p
|
|||
| detailed messages in case things go wrong. It's #[strong not recommended]
|
||||
| to use this command as part of an automated process. If you know which
|
||||
| model your project needs, you should consider a
|
||||
| #[+a("/docs/usage/models#download-pip") direct download via pip], or
|
||||
| #[+a("/usage/models#download-pip") direct download via pip], or
|
||||
| uploading the model to a local PyPI installation and fetching it straight
|
||||
| from there. This will also allow you to add it as a versioned package
|
||||
| dependency to your project.
|
||||
|
||||
+h(2, "link") Link
|
||||
+h(3, "link") Link
|
||||
|
||||
p
|
||||
| Create a #[+a("/docs/usage/models#usage") shortcut link] for a model,
|
||||
| Create a #[+a("/usage/models#usage") shortcut link] for a model,
|
||||
| either a Python package or a local directory. This will let you load
|
||||
| models from any location using a custom name via
|
||||
| #[+api("spacy#load") #[code spacy.load()]].
|
||||
|
@ -95,7 +86,7 @@ p
|
|||
+cell flag
|
||||
+cell Show help message and available arguments.
|
||||
|
||||
+h(2, "info") Info
|
||||
+h(3, "info") Info
|
||||
|
||||
p
|
||||
| Print information about your spaCy installation, models and local setup,
|
||||
|
@ -122,15 +113,15 @@ p
|
|||
+cell flag
|
||||
+cell Show help message and available arguments.
|
||||
|
||||
+h(2, "convert") Convert
|
||||
+h(3, "convert") Convert
|
||||
|
||||
p
|
||||
| Convert files into spaCy's #[+a("/docs/api/annotation#json-input") JSON format]
|
||||
| Convert files into spaCy's #[+a("/api/annotation#json-input") JSON format]
|
||||
| for use with the #[code train] command and other experiment management
|
||||
| functions. The right converter is chosen based on the file extension of
|
||||
| the input file. Currently only supports #[code .conllu].
|
||||
|
||||
+code(false, "bash", "$").
|
||||
+code(false, "bash", "$", false, false, true).
|
||||
spacy convert [input_file] [output_dir] [--n-sents] [--morphology]
|
||||
|
||||
+table(["Argument", "Type", "Description"])
|
||||
|
@ -159,14 +150,18 @@ p
|
|||
+cell flag
|
||||
+cell Show help message and available arguments.
|
||||
|
||||
+h(2, "train") Train
|
||||
+h(3, "train") Train
|
||||
|
||||
p
|
||||
| Train a model. Expects data in spaCy's
|
||||
| #[+a("/docs/api/annotation#json-input") JSON format].
|
||||
| #[+a("/api/annotation#json-input") JSON format]. On each epoch, a model
|
||||
| will be saved out to the directory. Accuracy scores and model details
|
||||
| will be added to a #[+a("/usage/training#models-generating") #[code meta.json]]
|
||||
| to allow packaging the model using the
|
||||
| #[+api("cli#package") #[code package]] command.
|
||||
|
||||
+code(false, "bash", "$").
|
||||
spacy train [lang] [output_dir] [train_data] [dev_data] [--n-iter] [--n-sents] [--use-gpu] [--no-tagger] [--no-parser] [--no-entities]
|
||||
+code(false, "bash", "$", false, false, true).
|
||||
spacy train [lang] [output_dir] [train_data] [dev_data] [--n-iter] [--n-sents] [--use-gpu] [--meta-path] [--vectors] [--no-tagger] [--no-parser] [--no-entities] [--gold-preproc]
|
||||
|
||||
+table(["Argument", "Type", "Description"])
|
||||
+row
|
||||
|
@ -204,6 +199,27 @@ p
|
|||
+cell option
|
||||
+cell Use GPU.
|
||||
|
||||
+row
|
||||
+cell #[code --vectors], #[code -v]
|
||||
+cell option
|
||||
+cell Model to load vectors from.
|
||||
|
||||
+row
|
||||
+cell #[code --meta-path], #[code -m]
|
||||
+cell option
|
||||
+cell
|
||||
| #[+tag-new(2)] Optional path to model
|
||||
| #[+a("/usage/training#models-generating") #[code meta.json]].
|
||||
| All relevant properties like #[code lang], #[code pipeline] and
|
||||
| #[code spacy_version] will be overwritten.
|
||||
|
||||
+row
|
||||
+cell #[code --version], #[code -V]
|
||||
+cell option
|
||||
+cell
|
||||
| Model version. Will be written out to the model's
|
||||
| #[code meta.json] after training.
|
||||
|
||||
+row
|
||||
+cell #[code --no-tagger], #[code -T]
|
||||
+cell flag
|
||||
|
@ -219,12 +235,18 @@ p
|
|||
+cell flag
|
||||
+cell Don't train NER.
|
||||
|
||||
+row
|
||||
+cell #[code --gold-preproc], #[code -G]
|
||||
+cell flag
|
||||
+cell Use gold preprocessing.
|
||||
|
||||
+row
|
||||
+cell #[code --help], #[code -h]
|
||||
+cell flag
|
||||
+cell Show help message and available arguments.
|
||||
|
||||
+h(3, "train-hyperparams") Environment variables for hyperparameters
|
||||
+h(4, "train-hyperparams") Environment variables for hyperparameters
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| spaCy lets you set hyperparameters for training via environment variables.
|
||||
|
@ -236,98 +258,149 @@ p
|
|||
+code(false, "bash").
|
||||
parser_hidden_depth=2 parser_maxout_pieces=1 train-parser
|
||||
|
||||
+under-construction
|
||||
|
||||
+table(["Name", "Description", "Default"])
|
||||
+row
|
||||
+cell #[code dropout_from]
|
||||
+cell
|
||||
+cell Initial dropout rate.
|
||||
+cell #[code 0.2]
|
||||
|
||||
+row
|
||||
+cell #[code dropout_to]
|
||||
+cell
|
||||
+cell Final dropout rate.
|
||||
+cell #[code 0.2]
|
||||
|
||||
+row
|
||||
+cell #[code dropout_decay]
|
||||
+cell
|
||||
+cell Rate of dropout change.
|
||||
+cell #[code 0.0]
|
||||
|
||||
+row
|
||||
+cell #[code batch_from]
|
||||
+cell
|
||||
+cell Initial batch size.
|
||||
+cell #[code 1]
|
||||
|
||||
+row
|
||||
+cell #[code batch_to]
|
||||
+cell
|
||||
+cell Final batch size.
|
||||
+cell #[code 64]
|
||||
|
||||
+row
|
||||
+cell #[code batch_compound]
|
||||
+cell
|
||||
+cell Rate of batch size acceleration.
|
||||
+cell #[code 1.001]
|
||||
|
||||
+row
|
||||
+cell #[code token_vector_width]
|
||||
+cell
|
||||
+cell Width of embedding tables and convolutional layers.
|
||||
+cell #[code 128]
|
||||
|
||||
+row
|
||||
+cell #[code embed_size]
|
||||
+cell
|
||||
+cell Number of rows in embedding tables.
|
||||
+cell #[code 7500]
|
||||
|
||||
+row
|
||||
+cell #[code parser_maxout_pieces]
|
||||
+cell
|
||||
+cell Number of pieces in the parser's and NER's first maxout layer.
|
||||
+cell #[code 2]
|
||||
|
||||
+row
|
||||
+cell #[code parser_hidden_depth]
|
||||
+cell
|
||||
+cell Number of hidden layers in the parser and NER.
|
||||
+cell #[code 1]
|
||||
|
||||
+row
|
||||
+cell #[code hidden_width]
|
||||
+cell
|
||||
+cell Size of the parser's and NER's hidden layers.
|
||||
+cell #[code 128]
|
||||
|
||||
+row
|
||||
+cell #[code learn_rate]
|
||||
+cell
|
||||
+cell Learning rate.
|
||||
+cell #[code 0.001]
|
||||
|
||||
+row
|
||||
+cell #[code optimizer_B1]
|
||||
+cell
|
||||
+cell Momentum for the Adam solver.
|
||||
+cell #[code 0.9]
|
||||
|
||||
+row
|
||||
+cell #[code optimizer_B2]
|
||||
+cell
|
||||
+cell Adagrad-momentum for the Adam solver.
|
||||
+cell #[code 0.999]
|
||||
|
||||
+row
|
||||
+cell #[code optimizer_eps]
|
||||
+cell
|
||||
+cell Epsilon value for the Adam solver.
|
||||
+cell #[code 1e-08]
|
||||
|
||||
+row
|
||||
+cell #[code L2_penalty]
|
||||
+cell
|
||||
+cell L2 regularisation penalty.
|
||||
+cell #[code 1e-06]
|
||||
|
||||
+row
|
||||
+cell #[code grad_norm_clip]
|
||||
+cell
|
||||
+cell Gradient L2 norm constraint.
|
||||
+cell #[code 1.0]
|
||||
|
||||
+h(2, "package") Package
|
||||
+h(3, "evaluate") Evaluate
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| Generate a #[+a("/docs/usage/saving-loading#generating") model Python package]
|
||||
| Evaluate a model's accuracy and speed on JSON-formatted annotated data.
|
||||
| Will print the results and optionally export
|
||||
| #[+a("/usage/visualizers") displaCy visualizations] of a sample set of
|
||||
| parses to #[code .html] files. Visualizations for the dependency parse
|
||||
| and NER will be exported as separate files if the respective component
|
||||
| is present in the model's pipeline.
|
||||
|
||||
+code(false, "bash", "$", false, false, true).
|
||||
spacy evaluate [model] [data_path] [--displacy-path] [--displacy-limit] [--gpu-id] [--gold-preproc]
|
||||
|
||||
+table(["Argument", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code model]
|
||||
+cell positional
|
||||
+cell
|
||||
| Model to evaluate. Can be a package or shortcut link name, or a
|
||||
| path to a model data directory.
|
||||
|
||||
+row
|
||||
+cell #[code data_path]
|
||||
+cell positional
|
||||
+cell Location of JSON-formatted evaluation data.
|
||||
|
||||
+row
|
||||
+cell #[code --displacy-path], #[code -dp]
|
||||
+cell option
|
||||
+cell
|
||||
| Directory to output rendered parses as HTML. If not set, no
|
||||
| visualizations will be generated.
|
||||
|
||||
+row
|
||||
+cell #[code --displacy-limit], #[code -dl]
|
||||
+cell option
|
||||
+cell
|
||||
| Number of parses to generate per file. Defaults to #[code 25].
|
||||
| Keep in mind that a significantly higher number might cause the
|
||||
| #[code .html] files to render slowly.
|
||||
|
||||
+row
|
||||
+cell #[code --gpu-id], #[code -g]
|
||||
+cell option
|
||||
+cell GPU to use, if any. Defaults to #[code -1] for CPU.
|
||||
|
||||
+row
|
||||
+cell #[code --gold-preproc], #[code -G]
|
||||
+cell flag
|
||||
+cell Use gold preprocessing.
|
||||
|
||||
|
||||
+h(3, "package") Package
|
||||
|
||||
p
|
||||
| Generate a #[+a("/usage/training#models-generating") model Python package]
|
||||
| from an existing model data directory. All data files are copied over.
|
||||
| If the path to a meta.json is supplied, or a meta.json is found in the
|
||||
| input directory, this file is used. Otherwise, the data can be entered
|
||||
|
@ -336,8 +409,8 @@ p
|
|||
| sure you're always using the latest versions. This means you need to be
|
||||
| connected to the internet to use this command.
|
||||
|
||||
+code(false, "bash", "$").
|
||||
spacy package [input_dir] [output_dir] [--meta] [--force]
|
||||
+code(false, "bash", "$", false, false, true).
|
||||
spacy package [input_dir] [output_dir] [--meta-path] [--create-meta] [--force]
|
||||
|
||||
+table(["Argument", "Type", "Description"])
|
||||
+row
|
||||
|
@ -353,14 +426,14 @@ p
|
|||
+row
|
||||
+cell #[code --meta-path], #[code -m]
|
||||
+cell option
|
||||
+cell Path to meta.json file (optional).
|
||||
+cell #[+tag-new(2)] Path to meta.json file (optional).
|
||||
|
||||
+row
|
||||
+cell #[code --create-meta], #[code -c]
|
||||
+cell flag
|
||||
+cell
|
||||
| Create a meta.json file on the command line, even if one already
|
||||
| exists in the directory.
|
||||
| #[+tag-new(2)] Create a meta.json file on the command line, even
|
||||
| if one already exists in the directory.
|
||||
|
||||
+row
|
||||
+cell #[code --force], #[code -f]
|
91
website/api/_top-level/_compat.jade
Normal file
|
@ -0,0 +1,91 @@
|
|||
//- 💫 DOCS > API > TOP-LEVEL > COMPATIBILITY
|
||||
|
||||
p
|
||||
| All Python code is written in an
|
||||
| #[strong intersection of Python 2 and Python 3]. This is easy in Cython,
|
||||
| but somewhat ugly in Python. Logic that deals with Python or platform
|
||||
| compatibility only lives in #[code spacy.compat]. To distinguish them from
|
||||
| the builtin functions, replacement functions are suffixed with an
|
||||
| underscore, e.g. #[code unicode_]. For specific checks, spaCy uses the
|
||||
| #[code six] and #[code ftfy] packages.
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.compat import unicode_, json_dumps
|
||||
|
||||
compatible_unicode = unicode_('hello world')
|
||||
compatible_json = json_dumps({'key': 'value'})
|
||||
|
||||
+table(["Name", "Python 2", "Python 3"])
|
||||
+row
|
||||
+cell #[code compat.bytes_]
|
||||
+cell #[code str]
|
||||
+cell #[code bytes]
|
||||
|
||||
+row
|
||||
+cell #[code compat.unicode_]
|
||||
+cell #[code unicode]
|
||||
+cell #[code str]
|
||||
|
||||
+row
|
||||
+cell #[code compat.basestring_]
|
||||
+cell #[code basestring]
|
||||
+cell #[code str]
|
||||
|
||||
+row
|
||||
+cell #[code compat.input_]
|
||||
+cell #[code raw_input]
|
||||
+cell #[code input]
|
||||
|
||||
+row
|
||||
+cell #[code compat.json_dumps]
|
||||
+cell #[code ujson.dumps] with #[code .decode('utf8')]
|
||||
+cell #[code ujson.dumps]
|
||||
|
||||
+row
|
||||
+cell #[code compat.path2str]
|
||||
+cell #[code str(path)] with #[code .decode('utf8')]
|
||||
+cell #[code str(path)]
|
||||
|
||||
+h(3, "is_config") compat.is_config
|
||||
+tag function
|
||||
|
||||
p
|
||||
| Check if a specific configuration of Python version and operating system
|
||||
| matches the user's setup. Mostly used to display targeted error messages.
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.compat import is_config
|
||||
|
||||
if is_config(python2=True, windows=True):
    print("You are using Python 2 on Windows.")
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code python2]
|
||||
+cell bool
|
||||
+cell spaCy is executed with Python 2.x.
|
||||
|
||||
+row
|
||||
+cell #[code python3]
|
||||
+cell bool
|
||||
+cell spaCy is executed with Python 3.x.
|
||||
|
||||
+row
|
||||
+cell #[code windows]
|
||||
+cell bool
|
||||
+cell spaCy is executed on Windows.
|
||||
|
||||
+row
|
||||
+cell #[code linux]
|
||||
+cell bool
|
||||
+cell spaCy is executed on Linux.
|
||||
|
||||
+row
|
||||
+cell #[code osx]
|
||||
+cell bool
|
||||
+cell spaCy is executed on OS X or macOS.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the specified configuration matches the user's platform.
|
|
@ -1,14 +1,12 @@
|
|||
//- 💫 DOCS > API > DISPLACY
|
||||
|
||||
include ../../_includes/_mixins
|
||||
//- 💫 DOCS > API > TOP-LEVEL > DISPLACY
|
||||
|
||||
p
|
||||
| As of v2.0, spaCy comes with a built-in visualization suite. For more
|
||||
| info and examples, see the usage guide on
|
||||
| #[+a("/docs/usage/visualizers") visualizing spaCy].
|
||||
| #[+a("/usage/visualizers") visualizing spaCy].
|
||||
|
||||
|
||||
+h(2, "serve") displacy.serve
|
||||
+h(3, "displacy.serve") displacy.serve
|
||||
+tag method
|
||||
+tag-new(2)
|
||||
|
||||
|
@ -60,7 +58,7 @@ p
|
|||
+cell bool
|
||||
+cell
|
||||
| Don't parse #[code Doc] and instead, expect a dict or list of
|
||||
| dicts. #[+a("/docs/usage/visualizers#manual-usage") See here]
|
||||
| dicts. #[+a("/usage/visualizers#manual-usage") See here]
|
||||
| for formats and examples.
|
||||
+cell #[code False]
|
||||
|
||||
|
@ -70,7 +68,7 @@ p
|
|||
+cell Port to serve visualization.
|
||||
+cell #[code 5000]
|
||||
|
||||
+h(2, "render") displacy.render
|
||||
+h(3, "displacy.render") displacy.render
|
||||
+tag method
|
||||
+tag-new(2)
|
||||
|
||||
|
@ -127,24 +125,24 @@ p Render a dependency parse tree or named entity visualization.
|
|||
+cell bool
|
||||
+cell
|
||||
| Don't parse #[code Doc] and instead, expect a dict or list of
|
||||
| dicts. #[+a("/docs/usage/visualizers#manual-usage") See here]
|
||||
| dicts. #[+a("/usage/visualizers#manual-usage") See here]
|
||||
| for formats and examples.
|
||||
+cell #[code False]
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell unicode
|
||||
+cell Rendered HTML markup.
|
||||
+cell
|
||||
|
||||
+h(2, "options") Visualizer options
|
||||
+h(3, "displacy_options") Visualizer options
|
||||
|
||||
p
|
||||
| The #[code options] argument lets you specify additional settings for
|
||||
| each visualizer. If a setting is not present in the options, the default
|
||||
| value will be used.
|
||||
|
||||
+h(3, "options-dep") Dependency Visualizer options
|
||||
+h(4, "options-dep") Dependency Visualizer options
|
||||
|
||||
+aside-code("Example").
|
||||
options = {'compact': True, 'color': 'blue'}
|
||||
|
@ -219,7 +217,7 @@ p
|
|||
+cell Distance between words in px.
|
||||
+cell #[code 175] / #[code 85] (compact)
|
||||
|
||||
+h(3, "options-ent") Named Entity Visualizer options
|
||||
+h(4, "displacy_options-ent") Named Entity Visualizer options
|
||||
|
||||
+aside-code("Example").
|
||||
options = {'ents': ['PERSON', 'ORG', 'PRODUCT'],
|
||||
|
@ -244,6 +242,6 @@ p
|
|||
|
||||
p
|
||||
| By default, displaCy comes with colours for all
|
||||
| #[+a("/docs/api/annotation#named-entities") entity types supported by spaCy].
|
||||
| #[+a("/api/annotation#named-entities") entity types supported by spaCy].
|
||||
| If you're using custom entity types, you can use the #[code colors]
|
||||
| setting to add your own colours for them.
|
|
@ -1,15 +1,13 @@
|
|||
//- 💫 DOCS > API > SPACY
|
||||
//- 💫 DOCS > API > TOP-LEVEL > SPACY
|
||||
|
||||
include ../../_includes/_mixins
|
||||
|
||||
+h(2, "load") spacy.load
|
||||
+h(3, "spacy.load") spacy.load
|
||||
+tag function
|
||||
+tag-model
|
||||
|
||||
p
|
||||
| Load a model via its #[+a("/docs/usage/models#usage") shortcut link],
|
||||
| Load a model via its #[+a("/usage/models#usage") shortcut link],
|
||||
| the name of an installed
|
||||
| #[+a("/docs/usage/saving-loading#generating") model package], a unicode
|
||||
| #[+a("/usage/training#models-generating") model package], a unicode
|
||||
| path or a #[code Path]-like object. spaCy will try resolving the load
|
||||
| argument in this order. If a model is loaded from a shortcut link or
|
||||
| package name, spaCy will assume it's a Python package and import it and
|
||||
|
@ -38,25 +36,57 @@ p
|
|||
+cell list
|
||||
+cell
|
||||
| Names of pipeline components to
|
||||
| #[+a("/docs/usage/language-processing-pipeline#disabling") disable].
|
||||
| #[+a("/usage/processing-pipelines#disabling") disable].
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Language]
|
||||
+cell A #[code Language] object with the loaded model.
|
||||
|
||||
+infobox("⚠️ Deprecation note")
|
||||
+infobox("Deprecation note", "⚠️")
|
||||
.o-block
|
||||
| As of spaCy 2.0, the #[code path] keyword argument is deprecated. spaCy
|
||||
| will also raise an error if no model could be loaded and never just
|
||||
| return an empty #[code Language] object. If you need a blank language,
|
||||
| you need to import it explicitly (#[code from spacy.lang.en import English])
|
||||
| or use #[+api("util#get_lang_class") #[code util.get_lang_class]].
|
||||
| you can use the new function #[+api("spacy#blank") #[code spacy.blank()]]
|
||||
| or import the class explicitly, e.g.
|
||||
| #[code from spacy.lang.en import English].
|
||||
|
||||
+code-new nlp = spacy.load('/model')
|
||||
+code-old nlp = spacy.load('en', path='/model')
|
||||
|
||||
+h(2, "info") spacy.info
|
||||
+h(3, "spacy.blank") spacy.blank
|
||||
+tag function
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| Create a blank model of a given language class. This function is the
|
||||
| twin of #[code spacy.load()].
|
||||
|
||||
+aside-code("Example").
|
||||
nlp_en = spacy.blank('en')
|
||||
nlp_de = spacy.blank('de')
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code name]
|
||||
+cell unicode
|
||||
+cell ISO code of the language class to load.
|
||||
|
||||
+row
|
||||
+cell #[code disable]
|
||||
+cell list
|
||||
+cell
|
||||
| Names of pipeline components to
|
||||
| #[+a("/usage/processing-pipelines#disabling") disable].
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Language]
|
||||
+cell An empty #[code Language] object of the appropriate subclass.
|
||||
|
||||
|
||||
+h(4, "spacy.info") spacy.info
|
||||
+tag function
|
||||
|
||||
p
|
||||
|
@ -83,13 +113,13 @@ p
|
|||
+cell Print information as Markdown.
|
||||
|
||||
|
||||
+h(2, "explain") spacy.explain
|
||||
+h(3, "spacy.explain") spacy.explain
|
||||
+tag function
|
||||
|
||||
p
|
||||
| Get a description for a given POS tag, dependency label or entity type.
|
||||
| For a list of available terms, see
|
||||
| #[+src(gh("spacy", "spacy/glossary.py")) glossary.py].
|
||||
| #[+src(gh("spacy", "spacy/glossary.py")) #[code glossary.py]].
|
||||
|
||||
+aside-code("Example").
|
||||
spacy.explain('NORP')
|
||||
|
@ -107,18 +137,18 @@ p
|
|||
+cell unicode
|
||||
+cell Term to explain.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell unicode
|
||||
+cell The explanation, or #[code None] if not found in the glossary.
|
||||
|
||||
+h(2, "set_factory") spacy.set_factory
|
||||
+h(3, "spacy.set_factory") spacy.set_factory
|
||||
+tag function
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| Set a factory that returns a custom
|
||||
| #[+a("/docs/usage/language-processing-pipeline") processing pipeline]
|
||||
| #[+a("/usage/processing-pipelines") processing pipeline]
|
||||
| component. Factories are useful for creating stateful components, especially ones which depend on shared data.
|
||||
|
||||
+aside-code("Example").
|
|
@ -1,10 +1,8 @@
|
|||
//- 💫 DOCS > API > UTIL
|
||||
|
||||
include ../../_includes/_mixins
|
||||
//- 💫 DOCS > API > TOP-LEVEL > UTIL
|
||||
|
||||
p
|
||||
| spaCy comes with a small collection of utility functions located in
|
||||
| #[+src(gh("spaCy", "spacy/util.py")) spacy/util.py].
|
||||
| #[+src(gh("spaCy", "spacy/util.py")) #[code spacy/util.py]].
|
||||
| Because utility functions are mostly intended for
|
||||
| #[strong internal use within spaCy], their behaviour may change with
|
||||
| future releases. The functions documented on this page should be safe
|
||||
|
@ -12,7 +10,7 @@ p
|
|||
| recommend having additional tests in place if your application depends on
|
||||
| any of spaCy's utilities.
|
||||
|
||||
+h(2, "get_data_path") util.get_data_path
|
||||
+h(3, "util.get_data_path") util.get_data_path
|
||||
+tag function
|
||||
|
||||
p
|
||||
|
@ -25,12 +23,12 @@ p
|
|||
+cell bool
|
||||
+cell Only return path if it exists, otherwise return #[code None].
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Path] / #[code None]
|
||||
+cell Data path or #[code None].
|
||||
|
||||
+h(2, "set_data_path") util.set_data_path
|
||||
+h(3, "util.set_data_path") util.set_data_path
|
||||
+tag function
|
||||
|
||||
p
|
||||
|
@ -47,12 +45,12 @@ p
|
|||
+cell unicode or #[code Path]
|
||||
+cell Path to new data directory.
|
||||
|
||||
+h(2, "get_lang_class") util.get_lang_class
|
||||
+h(3, "util.get_lang_class") util.get_lang_class
|
||||
+tag function
|
||||
|
||||
p
|
||||
| Import and load a #[code Language] class. Allows lazy-loading
|
||||
| #[+a("/docs/usage/adding-languages") language data] and importing
|
||||
| #[+a("/usage/adding-languages") language data] and importing
|
||||
| languages using the two-letter language code.
|
||||
|
||||
+aside-code("Example").
|
||||
|
@ -67,12 +65,12 @@ p
|
|||
+cell unicode
|
||||
+cell Two-letter language code, e.g. #[code 'en'].
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Language]
|
||||
+cell Language class.
|
||||
|
||||
+h(2, "load_model") util.load_model
|
||||
+h(3, "util.load_model") util.load_model
|
||||
+tag function
|
||||
+tag-new(2)
|
||||
|
||||
|
@ -101,12 +99,12 @@ p
|
|||
+cell -
|
||||
+cell Specific overrides, like pipeline components to disable.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Language]
|
||||
+cell #[code Language] class with the loaded model.
|
||||
|
||||
+h(2, "load_model_from_path") util.load_model_from_path
|
||||
+h(3, "util.load_model_from_path") util.load_model_from_path
|
||||
+tag function
|
||||
+tag-new(2)
|
||||
|
||||
|
@ -139,18 +137,18 @@ p
|
|||
+cell -
|
||||
+cell Specific overrides, like pipeline components to disable.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Language]
|
||||
+cell #[code Language] class with the loaded model.
|
||||
|
||||
+h(2, "load_model_from_init_py") util.load_model_from_init_py
|
||||
+h(3, "util.load_model_from_init_py") util.load_model_from_init_py
|
||||
+tag function
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| A helper function to use in the #[code load()] method of a model package's
|
||||
| #[+src(gh("spacy-dev-resources", "templates/model/en_model_name/__init__.py")) __init__.py].
|
||||
| #[+src(gh("spacy-dev-resources", "templates/model/en_model_name/__init__.py")) #[code __init__.py]].
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.util import load_model_from_init_py
|
||||
|
@ -169,12 +167,12 @@ p
|
|||
+cell -
|
||||
+cell Specific overrides, like pipeline components to disable.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Language]
|
||||
+cell #[code Language] class with the loaded model.
|
||||
|
||||
+h(2, "get_model_meta") util.get_model_meta
|
||||
+h(3, "util.get_model_meta") util.get_model_meta
|
||||
+tag function
|
||||
+tag-new(2)
|
||||
|
||||
|
@ -190,17 +188,17 @@ p
|
|||
+cell unicode or #[code Path]
|
||||
+cell Path to model directory.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell dict
|
||||
+cell The model's meta data.
|
||||
|
||||
+h(2, "is_package") util.is_package
|
||||
+h(3, "util.is_package") util.is_package
|
||||
+tag function
|
||||
|
||||
p
|
||||
| Check if a string maps to a package installed via pip. Mainly used to
|
||||
| validate #[+a("/docs/usage/models") model packages].
|
||||
| validate #[+a("/usage/models") model packages].
|
||||
|
||||
+aside-code("Example").
|
||||
util.is_package('en_core_web_sm') # True
|
||||
|
@ -212,18 +210,18 @@ p
|
|||
+cell unicode
|
||||
+cell Name of package.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code bool]
|
||||
+cell #[code True] if installed package, #[code False] if not.
|
||||
|
||||
+h(2, "get_package_path") util.get_package_path
|
||||
+h(3, "util.get_package_path") util.get_package_path
|
||||
+tag function
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| Get path to an installed package. Mainly used to resolve the location of
|
||||
| #[+a("/docs/usage/models") model packages]. Currently imports the package
|
||||
| #[+a("/usage/models") model packages]. Currently imports the package
|
||||
| to find its path.
|
||||
|
||||
+aside-code("Example").
|
||||
|
@ -236,12 +234,12 @@ p
|
|||
+cell unicode
|
||||
+cell Name of installed package.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Path]
|
||||
+cell Path to model package directory.
|
||||
|
||||
+h(2, "is_in_jupyter") util.is_in_jupyter
|
||||
+h(3, "util.is_in_jupyter") util.is_in_jupyter
|
||||
+tag function
|
||||
+tag-new(2)
|
||||
|
||||
|
@ -257,17 +255,17 @@ p
|
|||
return display(HTML(html))
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell #[code True] if in Jupyter, #[code False] if not.
|
||||
|
||||
+h(2, "update_exc") util.update_exc
|
||||
+h(3, "util.update_exc") util.update_exc
|
||||
+tag function
|
||||
|
||||
p
|
||||
| Update, validate and overwrite
|
||||
| #[+a("/docs/usage/adding-languages#tokenizer-exceptions") tokenizer exceptions].
|
||||
| #[+a("/usage/adding-languages#tokenizer-exceptions") tokenizer exceptions].
|
||||
| Used to combine global exceptions with custom, language-specific
|
||||
| exceptions. Will raise an error if key doesn't match #[code ORTH] values.
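p
    | A rough usage sketch; the exception entries here are invented for
    | illustration:

+code.
    from spacy.symbols import ORTH, LEMMA
    from spacy.util import update_exc

    BASE_EXCEPTIONS = {"a.": [{ORTH: "a."}], ":)": [{ORTH: ":)"}]}
    NEW_EXCEPTIONS = {"a.": [{ORTH: "a.", LEMMA: "all"}]}
    exceptions = update_exc(BASE_EXCEPTIONS, NEW_EXCEPTIONS)
    # {"a.": [{ORTH: "a.", LEMMA: "all"}], ":)": [{ORTH: ":)"}]}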
|
||||
|
||||
|
@ -288,20 +286,20 @@ p
|
|||
+cell dicts
|
||||
+cell Exception dictionaries to add to the base exceptions, in order.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell dict
|
||||
+cell Combined tokenizer exceptions.
|
||||
|
||||
|
||||
+h(2, "prints") util.prints
|
||||
+h(3, "util.prints") util.prints
|
||||
+tag function
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| Print a formatted, text-wrapped message with optional title. If a text
|
||||
| argument is a #[code Path], it's converted to a string. Should only
|
||||
| be used for interactive components like the #[+api("cli") cli].
|
||||
| be used for interactive components like the command-line interface.
|
||||
|
||||
+aside-code("Example").
|
||||
data_path = Path('/some/path')
|
131
website/api/annotation.jade
Normal file
|
@ -0,0 +1,131 @@
|
|||
//- 💫 DOCS > API > ANNOTATION SPECS
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
p This document describes the target annotations spaCy is trained to predict.
|
||||
|
||||
|
||||
+section("tokenization")
|
||||
+h(2, "tokenization") Tokenization
|
||||
|
||||
p
|
||||
| Tokenization standards are based on the
|
||||
| #[+a("https://catalog.ldc.upenn.edu/LDC2013T19") OntoNotes 5] corpus.
|
||||
| The tokenizer differs from most by including tokens for significant
|
||||
| whitespace. Any sequence of whitespace characters beyond a single space
|
||||
| (#[code ' ']) is included as a token.
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.lang.en import English
|
||||
nlp = English()
|
||||
tokens = nlp('Some\nspaces and\ttab characters')
|
||||
tokens_text = [t.text for t in tokens]
|
||||
assert tokens_text == ['Some', '\n', 'spaces', ' ', 'and',
|
||||
'\t', 'tab', 'characters']
|
||||
|
||||
p
|
||||
| The whitespace tokens are useful for much the same reason punctuation is
|
||||
| – it's often an important delimiter in the text. By preserving it in the
|
||||
| token output, we are able to maintain a simple alignment between the
|
||||
| tokens and the original string, and we ensure that no information is
|
||||
| lost during processing.
|
||||
|
||||
+section("sbd")
|
||||
+h(2, "sentence-boundary") Sentence boundary detection
|
||||
|
||||
p
|
||||
| Sentence boundaries are calculated from the syntactic parse tree, so
|
||||
| features such as punctuation and capitalisation play an important but
|
||||
| non-decisive role in determining the sentence boundaries. Usually this
|
||||
| means that the sentence boundaries will at least coincide with clause
|
||||
| boundaries, even given poorly punctuated text.
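p
    | A minimal sketch, assuming a model with a dependency parser (e.g.
    | #[code en_core_web_sm]) is installed:

+code.
    import spacy

    nlp = spacy.load('en_core_web_sm')
    doc = nlp(u"This is a sentence. This is another one.")
    for sent in doc.sents:
        print(sent.text)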
|
||||
|
||||
+section("pos-tagging")
|
||||
+h(2, "pos-tagging") Part-of-speech Tagging
|
||||
|
||||
+aside("Tip: Understanding tags")
|
||||
| You can also use #[code spacy.explain()] to get the description for the
|
||||
| string representation of a tag. For example,
|
||||
| #[code spacy.explain("RB")] will return "adverb".
|
||||
|
||||
include _annotation/_pos-tags
|
||||
|
||||
+section("lemmatization")
|
||||
+h(2, "lemmatization") Lemmatization
|
||||
|
||||
p A "lemma" is the uninflected form of a word. In English, this means:
|
||||
|
||||
+list
|
||||
+item #[strong Adjectives]: The form like "happy", not "happier" or "happiest"
|
||||
+item #[strong Adverbs]: The form like "badly", not "worse" or "worst"
|
||||
+item #[strong Nouns]: The form like "dog", not "dogs"; like "child", not "children"
|
||||
+item #[strong Verbs]: The form like "write", not "writes", "writing", "wrote" or "written"
|
||||
|
||||
p
|
||||
| The lemmatization data is taken from
|
||||
| #[+a("https://wordnet.princeton.edu") WordNet]. However, we also add a
|
||||
| special case for pronouns: all pronouns are lemmatized to the special
|
||||
| token #[code -PRON-].
|
||||
|
||||
+infobox("About spaCy's custom pronoun lemma")
|
||||
| Unlike verbs and common nouns, there's no clear base form of a personal
|
||||
| pronoun. Should the lemma of "me" be "I", or should we normalize person
|
||||
| as well, giving "it" — or maybe "he"? spaCy's solution is to introduce a
|
||||
| novel symbol, #[code -PRON-], which is used as the lemma for
|
||||
| all personal pronouns.
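For example, assuming an English model is installed (the name 'en' is illustrative), all personal pronouns come back with the same lemma:

    import spacy

    nlp = spacy.load('en')
    doc = nlp(u'She gave it to me.')
    # Personal pronouns are all lemmatized to the special -PRON- symbol.
    print([(t.text, t.lemma_) for t in doc if t.pos_ == 'PRON'])
    # e.g. [('She', '-PRON-'), ('it', '-PRON-'), ('me', '-PRON-')]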
|
||||
|
||||
+section("dependency-parsing")
|
||||
+h(2, "dependency-parsing") Syntactic Dependency Parsing
|
||||
|
||||
+aside("Tip: Understanding labels")
|
||||
| You can also use #[code spacy.explain()] to get the description for the
|
||||
| string representation of a label. For example,
|
||||
| #[code spacy.explain("prt")] will return "particle".
|
||||
|
||||
include _annotation/_dep-labels
|
||||
|
||||
+section("named-entities")
|
||||
+h(2, "named-entities") Named Entity Recognition
|
||||
|
||||
+aside("Tip: Understanding entity types")
|
||||
| You can also use #[code spacy.explain()] to get the description for the
|
||||
| string representation of an entity label. For example,
|
||||
| #[code spacy.explain("LANGUAGE")] will return "any named language".
|
||||
|
||||
include _annotation/_named-entities
|
||||
|
||||
+h(3, "biluo") BILUO Scheme
|
||||
|
||||
include _annotation/_biluo
|
||||
|
||||
+section("training")
|
||||
+h(2, "json-input") JSON input format for training
|
||||
|
||||
+under-construction
|
||||
|
||||
p spaCy takes training data in the following format:
|
||||
|
||||
+code("Example structure").
|
||||
doc: {
|
||||
id: string,
|
||||
paragraphs: [{
|
||||
raw: string,
|
||||
sents: [int],
|
||||
tokens: [{
|
||||
start: int,
|
||||
tag: string,
|
||||
head: int,
|
||||
dep: string
|
||||
}],
|
||||
ner: [{
|
||||
start: int,
|
||||
end: int,
|
||||
label: string
|
||||
}],
|
||||
brackets: [{
|
||||
start: int,
|
||||
end: int,
|
||||
label: string
|
||||
}]
|
||||
}]
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
//- 💫 DOCS > API > BINDER
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p A container class for serializing collections of #[code Doc] objects.
|
||||
|
5
website/api/dependencyparser.jade
Normal file
5
website/api/dependencyparser.jade
Normal file
|
@ -0,0 +1,5 @@
|
|||
//- 💫 DOCS > API > DEPENDENCYPARSER
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
!=partial("pipe", { subclass: "DependencyParser", short: "parser", pipeline_id: "parser" })
|
|
@ -1,8 +1,6 @@
|
|||
//- 💫 DOCS > API > DOC
|
||||
|
||||
include ../../_includes/_mixins
|
||||
|
||||
p A container for accessing linguistic annotations.
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| A #[code Doc] is a sequence of #[+api("token") #[code Token]] objects.
|
||||
|
@ -47,7 +45,7 @@ p
|
|||
| subsequent space. Must have the same length as #[code words], if
|
||||
| specified. Defaults to a sequence of #[code True].
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Doc]
|
||||
+cell The newly constructed object.
|
||||
|
@ -73,7 +71,7 @@ p
|
|||
+cell int
|
||||
+cell The index of the token.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Token]
|
||||
+cell The token at #[code doc[i]].
|
||||
|
@ -96,7 +94,7 @@ p
|
|||
+cell tuple
|
||||
+cell The slice of the document to get.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Span]
|
||||
+cell The span at #[code doc[start : end]].
|
||||
|
@ -120,7 +118,7 @@ p
|
|||
| from Cython.
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Token]
|
||||
+cell A #[code Token] object.
|
||||
|
@ -135,7 +133,7 @@ p Get the number of tokens in the document.
|
|||
assert len(doc) == 7
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The number of tokens in the document.
|
||||
|
@ -172,7 +170,7 @@ p Create a #[code Span] object from the slice #[code doc.text[start : end]].
|
|||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell A meaning representation of the span.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Span]
|
||||
+cell The newly constructed object.
|
||||
|
@ -200,7 +198,7 @@ p
|
|||
| The object to compare with. By default, accepts #[code Doc],
|
||||
| #[code Span], #[code Token] and #[code Lexeme] objects.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell float
|
||||
+cell A scalar similarity score. Higher is more similar.
|
||||
|
@ -226,7 +224,7 @@ p
|
|||
+cell int
|
||||
+cell The attribute ID
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell dict
|
||||
+cell A dictionary mapping attributes to integer counts.
|
||||
|
@ -251,7 +249,7 @@ p
|
|||
+cell list
|
||||
+cell A list of attribute ID ints.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code.u-break numpy.ndarray[ndim=2, dtype='int32']]
|
||||
+cell
|
||||
|
@ -285,7 +283,7 @@ p
|
|||
+cell #[code.u-break numpy.ndarray[ndim=2, dtype='int32']]
|
||||
+cell The attribute values to load.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Doc]
|
||||
+cell Itself.
|
||||
|
@ -326,7 +324,7 @@ p Loads state from a directory. Modifies the object in place and returns it.
|
|||
| A path to a directory. Paths may be either strings or
|
||||
| #[code Path]-like objects.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Doc]
|
||||
+cell The modified #[code Doc] object.
|
||||
|
@ -341,7 +339,7 @@ p Serialize, i.e. export the document contents to a binary string.
|
|||
doc_bytes = doc.to_bytes()
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bytes
|
||||
+cell
|
||||
|
@ -367,7 +365,7 @@ p Deserialize, i.e. import the document contents from a binary string.
|
|||
+cell bytes
|
||||
+cell The string to load from.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Doc]
|
||||
+cell The #[code Doc] object.
|
||||
|
@ -378,7 +376,7 @@ p Deserialize, i.e. import the document contents from a binary string.
|
|||
p
|
||||
| Retokenize the document, such that the span at
|
||||
| #[code doc.text[start_idx : end_idx]] is merged into a single token. If
|
||||
| #[code start_idx] and #[end_idx] do not mark start and end token
|
||||
| #[code start_idx] and #[code end_idx] do not mark start and end token
|
||||
| boundaries, the document remains unchanged.
|
||||
|
||||
+aside-code("Example").
|
||||
|
@ -405,7 +403,7 @@ p
|
|||
| attributes are inherited from the syntactic root token of
|
||||
| the span.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Token]
|
||||
+cell
|
||||
|
@ -440,7 +438,7 @@ p
|
|||
+cell bool
|
||||
+cell Don't include arcs or modifiers.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell dict
|
||||
+cell Parse tree as dict.
|
||||
|
@ -462,7 +460,7 @@ p
|
|||
assert ents[0].text == 'Mr. Best'
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Span]
|
||||
+cell Entities in the document.
|
||||
|
@ -485,7 +483,7 @@ p
|
|||
assert chunks[1].text == "another phrase"
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Span]
|
||||
+cell Noun chunks in the document.
|
||||
|
@ -507,7 +505,7 @@ p
|
|||
assert [s.root.text for s in sents] == ["is", "'s"]
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Span]
|
||||
+cell Sentences in the document.
|
||||
|
@ -525,7 +523,7 @@ p
|
|||
assert doc.has_vector
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the document has a vector data attached.
|
||||
|
@ -544,7 +542,7 @@ p
|
|||
assert doc.vector.shape == (300,)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell A 1D numpy array representing the document's semantics.
|
||||
|
@ -564,7 +562,7 @@ p
|
|||
assert doc1.vector_norm != doc2.vector_norm
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell float
|
||||
+cell The L2 norm of the vector representation.
|
5
website/api/entityrecognizer.jade
Normal file
5
website/api/entityrecognizer.jade
Normal file
|
@ -0,0 +1,5 @@
|
|||
//- 💫 DOCS > API > ENTITYRECOGNIZER
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
!=partial("pipe", { subclass: "EntityRecognizer", short: "ner", pipeline_id: "ner" })
|
|
@ -1,14 +1,12 @@
|
|||
//- 💫 DOCS > API > GOLDCORPUS
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| An annotated corpus, using the JSON file format. Manages annotations for
|
||||
| tagging, dependency parsing and NER.
|
||||
| This class manages annotations for tagging, dependency parsing and NER.
|
||||
|
||||
+h(2, "init") GoldCorpus.__init__
|
||||
+tag method
|
||||
+tag-new(2)
|
||||
|
||||
p Create a #[code GoldCorpus].
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
//- 💫 DOCS > API > GOLDPARSE
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p Collection for training annotations.
|
||||
|
||||
|
@ -40,7 +40,7 @@ p Create a #[code GoldParse].
|
|||
+cell iterable
|
||||
+cell A sequence of named entity annotations, either as BILUO tag strings, or as #[code (start_char, end_char, label)] tuples, representing the entity positions.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code GoldParse]
|
||||
+cell The newly constructed object.
|
||||
|
@ -51,7 +51,7 @@ p Create a #[code GoldParse].
|
|||
p Get the number of gold-standard tokens.
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The number of gold-standard tokens.
|
||||
|
@ -64,7 +64,7 @@ p
|
|||
| tree.
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether annotations form projective tree.
|
||||
|
@ -119,7 +119,7 @@ p
|
|||
|
||||
p
|
||||
| Encode labelled spans into per-token tags, using the
|
||||
| #[+a("/docs/api/annotation#biluo") BILUO scheme] (Begin/In/Last/Unit/Out).
|
||||
| #[+a("/api/annotation#biluo") BILUO scheme] (Begin/In/Last/Unit/Out).
|
||||
|
||||
p
|
||||
| Returns a list of unicode strings, describing the tags. Each tag string
|
||||
|
@ -157,11 +157,11 @@ p
|
|||
| and #[code end] should be character-offset integers denoting the
|
||||
| slice into the original string.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell list
|
||||
+cell
|
||||
| Unicode strings, describing the
|
||||
| #[+a("/docs/api/annotation#biluo") BILUO] tags.
|
||||
| #[+a("/api/annotation#biluo") BILUO] tags.
|
||||
|
||||
|
14
website/api/index.jade
Normal file
14
website/api/index.jade
Normal file
|
@ -0,0 +1,14 @@
|
|||
//- 💫 DOCS > API > ARCHITECTURE
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
+section("basics")
|
||||
include ../usage/_spacy-101/_architecture
|
||||
|
||||
+section("nn-model")
|
||||
+h(2, "nn-model") Neural network model architecture
|
||||
include _architecture/_nn-model
|
||||
|
||||
+section("cython")
|
||||
+h(2, "cython") Cython conventions
|
||||
include _architecture/_cython
|
|
@ -1,10 +1,10 @@
|
|||
//- 💫 DOCS > API > LANGUAGE
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| A text-processing pipeline. Usually you'll load this once per process,
|
||||
| and pass the instance around your application.
|
||||
| Usually you'll load this once per process as #[code nlp] and pass the
|
||||
| instance around your application.
|
||||
|
||||
+h(2, "init") Language.__init__
|
||||
+tag method
|
||||
|
@ -49,7 +49,7 @@ p Initialise a #[code Language] object.
|
|||
| Custom meta data for the #[code Language] class. Is written to by
|
||||
| models to add model meta data.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Language]
|
||||
+cell The newly constructed object.
|
||||
|
@ -77,14 +77,14 @@ p
|
|||
+cell list
|
||||
+cell
|
||||
| Names of pipeline components to
|
||||
| #[+a("/docs/usage/language-processing-pipeline#disabling") disable].
|
||||
| #[+a("/usage/processing-pipelines#disabling") disable].
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Doc]
|
||||
+cell A container for accessing the annotations.
|
||||
|
||||
+infobox("⚠️ Deprecation note")
|
||||
+infobox("Deprecation note", "⚠️")
|
||||
.o-block
|
||||
| Pipeline components to prevent from being loaded can now be added as
|
||||
| a list to #[code disable], instead of specifying one keyword argument
|
||||
|
@ -136,9 +136,9 @@ p
|
|||
+cell list
|
||||
+cell
|
||||
| Names of pipeline components to
|
||||
| #[+a("/docs/usage/language-processing-pipeline#disabling") disable].
|
||||
| #[+a("/usage/processing-pipelines#disabling") disable].
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Doc]
|
||||
+cell Documents in the order of the original text.
|
||||
|
@ -175,7 +175,7 @@ p Update the models in the pipeline.
|
|||
+cell callable
|
||||
+cell An optimizer.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell dict
|
||||
+cell Results from the update.
|
||||
|
@ -200,7 +200,7 @@ p
|
|||
+cell -
|
||||
+cell Config parameters.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell tuple
|
||||
+cell An optimizer.
|
||||
|
@ -242,7 +242,7 @@ p
|
|||
+cell iterable
|
||||
+cell Tuples of #[code Doc] and #[code GoldParse] objects.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell tuple
|
||||
+cell Tuples of #[code Doc] and #[code GoldParse] objects.
|
||||
|
@ -271,7 +271,7 @@ p
|
|||
+cell list
|
||||
+cell
|
||||
| Names of pipeline components to
|
||||
| #[+a("/docs/usage/language-processing-pipeline#disabling") disable]
|
||||
| #[+a("/usage/processing-pipelines#disabling") disable]
|
||||
| and prevent from being saved.
|
||||
|
||||
+h(2, "from_disk") Language.from_disk
|
||||
|
@ -300,14 +300,14 @@ p
|
|||
+cell list
|
||||
+cell
|
||||
| Names of pipeline components to
|
||||
| #[+a("/docs/usage/language-processing-pipeline#disabling") disable].
|
||||
| #[+a("/usage/processing-pipelines#disabling") disable].
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Language]
|
||||
+cell The modified #[code Language] object.
|
||||
|
||||
+infobox("⚠️ Deprecation note")
|
||||
+infobox("Deprecation note", "⚠️")
|
||||
.o-block
|
||||
| As of spaCy v2.0, the #[code save_to_directory] method has been
|
||||
| renamed to #[code to_disk], to improve consistency across classes.
|
||||
|
@ -332,10 +332,10 @@ p Serialize the current state to a binary string.
|
|||
+cell list
|
||||
+cell
|
||||
| Names of pipeline components to
|
||||
| #[+a("/docs/usage/language-processing-pipeline#disabling") disable]
|
||||
| #[+a("/usage/processing-pipelines#disabling") disable]
|
||||
| and prevent from being serialized.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bytes
|
||||
+cell The serialized form of the #[code Language] object.
|
||||
|
@ -362,14 +362,14 @@ p Load state from a binary string.
|
|||
+cell list
|
||||
+cell
|
||||
| Names of pipeline components to
|
||||
| #[+a("/docs/usage/language-processing-pipeline#disabling") disable].
|
||||
| #[+a("/usage/processing-pipelines#disabling") disable].
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Language]
|
||||
+cell The #[code Language] object.
|
||||
|
||||
+infobox("⚠️ Deprecation note")
|
||||
+infobox("Deprecation note", "⚠️")
|
||||
.o-block
|
||||
| Pipeline components to prevent from being loaded can now be added as
|
||||
| a list to #[code disable], instead of specifying one keyword argument
|
5
website/api/lemmatizer.jade
Normal file
5
website/api/lemmatizer.jade
Normal file
|
@ -0,0 +1,5 @@
|
|||
//- 💫 DOCS > API > LEMMATIZER
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
+under-construction
|
|
@ -1,6 +1,6 @@
|
|||
//- 💫 DOCS > API > LEXEME
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| An entry in the vocabulary. A #[code Lexeme] has no string context – it's
|
||||
|
@ -24,7 +24,7 @@ p Create a #[code Lexeme] object.
|
|||
+cell int
|
||||
+cell The orth id of the lexeme.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Lexeme]
|
||||
+cell The newly constructed object.
|
||||
|
@ -65,7 +65,7 @@ p Check the value of a boolean flag.
|
|||
+cell int
|
||||
+cell The attribute ID of the flag to query.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell The value of the flag.
|
||||
|
@ -91,7 +91,7 @@ p Compute a semantic similarity estimate. Defaults to cosine over vectors.
|
|||
| The object to compare with. By default, accepts #[code Doc],
|
||||
| #[code Span], #[code Token] and #[code Lexeme] objects.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell float
|
||||
+cell A scalar similarity score. Higher is more similar.
|
||||
|
@ -110,7 +110,7 @@ p
|
|||
assert apple.has_vector
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the lexeme has a vector data attached.
|
||||
|
@ -127,7 +127,7 @@ p A real-valued meaning representation.
|
|||
assert apple.vector.shape == (300,)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell A 1D numpy array representing the lexeme's semantics.
|
||||
|
@ -146,7 +146,7 @@ p The L2 norm of the lexeme's vector representation.
|
|||
assert apple.vector_norm != pasta.vector_norm
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell float
|
||||
+cell The L2 norm of the vector representation.
|
|
@ -1,10 +1,8 @@
|
|||
//- 💫 DOCS > API > MATCHER
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p Match sequences of tokens, based on pattern rules.
|
||||
|
||||
+infobox("⚠️ Deprecation note")
|
||||
+infobox("Deprecation note", "⚠️")
|
||||
| As of spaCy 2.0, #[code Matcher.add_pattern] and #[code Matcher.add_entity]
|
||||
| are deprecated and have been replaced with a simpler
|
||||
| #[+api("matcher#add") #[code Matcher.add]] that lets you add a list of
|
||||
|
@ -39,7 +37,7 @@ p Create the rule-based #[code Matcher].
|
|||
+cell dict
|
||||
+cell Patterns to add to the matcher, keyed by ID.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Matcher]
|
||||
+cell The newly constructed object.
|
||||
|
@ -64,7 +62,7 @@ p Find all token sequences matching the supplied patterns on the #[code Doc].
|
|||
+cell #[code Doc]
|
||||
+cell The document to match over.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell list
|
||||
+cell
|
||||
|
@ -81,7 +79,7 @@ p Find all token sequences matching the supplied patterns on the #[code Doc].
|
|||
| actions per pattern within the same matcher. For example, you might only
|
||||
| want to merge some entity types, and set custom flags for other matched
|
||||
| patterns. For more details and examples, see the usage guide on
|
||||
| #[+a("/docs/usage/rule-based-matching") rule-based matching].
|
||||
| #[+a("/usage/linguistic-features#rule-based-matching") rule-based matching].
|
||||
|
||||
+h(2, "pipe") Matcher.pipe
|
||||
+tag method
|
||||
|
@ -113,7 +111,7 @@ p Match a stream of documents, yielding them in turn.
|
|||
| parallel, if the #[code Matcher] implementation supports
|
||||
| multi-threading.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Doc]
|
||||
+cell Documents, in order.
|
||||
|
@ -134,7 +132,7 @@ p
|
|||
assert len(matcher) == 1
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The number of rules.
|
||||
|
@ -156,7 +154,8 @@ p Check whether the matcher contains rules for a match ID.
|
|||
+cell #[code key]
|
||||
+cell unicode
|
||||
+cell The match ID.
|
||||
+footrow
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell Whether the matcher contains rules for this match ID.
|
||||
|
@ -203,7 +202,7 @@ p
|
|||
| Match pattern. A pattern consists of a list of dicts, where each
|
||||
| dict describes a token.
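For instance, a pattern matching "hello", a punctuation token and "world" might look like this (a minimal sketch; the rule name and attributes are only examples, and an existing nlp object is assumed):

    from spacy.matcher import Matcher

    matcher = Matcher(nlp.vocab)
    pattern = [{'LOWER': 'hello'}, {'IS_PUNCT': True}, {'LOWER': 'world'}]
    matcher.add('HelloWorld', None, pattern)  # ID, on_match callback, pattern(s)
    matches = matcher(nlp(u'Hello, world!'))  # list of (match_id, start, end)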
|
||||
|
||||
+infobox("⚠️ Deprecation note")
|
||||
+infobox("Deprecation note", "⚠️")
|
||||
.o-block
|
||||
| As of spaCy 2.0, #[code Matcher.add_pattern] and #[code Matcher.add_entity]
|
||||
| are deprecated and have been replaced with a simpler
|
||||
|
@ -257,7 +256,7 @@ p
|
|||
+cell unicode
|
||||
+cell The ID of the match rule.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell tuple
|
||||
+cell The rule, as an #[code (on_match, patterns)] tuple.
|
181
website/api/phrasematcher.jade
Normal file
181
website/api/phrasematcher.jade
Normal file
|
@ -0,0 +1,181 @@
|
|||
//- 💫 DOCS > API > PHRASEMATCHER
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| The #[code PhraseMatcher] lets you efficiently match large terminology
|
||||
| lists. While the #[+api("matcher") #[code Matcher]] lets you match
|
||||
| sequences based on lists of token descriptions, the #[code PhraseMatcher]
|
||||
| accepts match patterns in the form of #[code Doc] objects.
|
||||
|
||||
+h(2, "init") PhraseMatcher.__init__
|
||||
+tag method
|
||||
|
||||
p Create the rule-based #[code PhraseMatcher].
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.matcher import PhraseMatcher
|
||||
matcher = PhraseMatcher(nlp.vocab, max_length=6)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code vocab]
|
||||
+cell #[code Vocab]
|
||||
+cell
|
||||
| The vocabulary object, which must be shared with the documents
|
||||
| the matcher will operate on.
|
||||
|
||||
+row
|
||||
+cell #[code max_length]
|
||||
+cell int
|
||||
+cell Maximum length of a phrase pattern to add.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code PhraseMatcher]
|
||||
+cell The newly constructed object.
|
||||
|
||||
+h(2, "call") PhraseMatcher.__call__
|
||||
+tag method
|
||||
|
||||
p Find all token sequences matching the supplied patterns on the #[code Doc].
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.matcher import PhraseMatcher
|
||||
|
||||
matcher = PhraseMatcher(nlp.vocab)
|
||||
matcher.add('OBAMA', None, nlp(u"Barack Obama"))
|
||||
doc = nlp(u"Barack Obama lifts America one last time in emotional farewell")
|
||||
matches = matcher(doc)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code doc]
|
||||
+cell #[code Doc]
|
||||
+cell The document to match over.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell list
|
||||
+cell
|
||||
| A list of #[code (match_id, start, end)] tuples, describing the
|
||||
| matches. A match tuple describes a span #[code doc[start:end]].
|
||||
| The #[code match_id] is the ID of the added match pattern.
|
||||
|
||||
+h(2, "pipe") PhraseMatcher.pipe
|
||||
+tag method
|
||||
|
||||
p Match a stream of documents, yielding them in turn.
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.matcher import PhraseMatcher
|
||||
matcher = PhraseMatcher(nlp.vocab)
|
||||
for doc in matcher.pipe(texts, batch_size=50, n_threads=4):
|
||||
pass
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code docs]
|
||||
+cell iterable
|
||||
+cell A stream of documents.
|
||||
|
||||
+row
|
||||
+cell #[code batch_size]
|
||||
+cell int
|
||||
+cell The number of documents to accumulate into a working set.
|
||||
|
||||
+row
|
||||
+cell #[code n_threads]
|
||||
+cell int
|
||||
+cell
|
||||
| The number of threads with which to work on the buffer in
|
||||
| parallel, if the #[code PhraseMatcher] implementation supports
|
||||
| multi-threading.
|
||||
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Doc]
|
||||
+cell Documents, in order.
|
||||
|
||||
+h(2, "len") PhraseMatcher.__len__
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Get the number of rules added to the matcher. Note that this only returns
|
||||
| the number of rules (identical with the number of IDs), not the number
|
||||
| of individual patterns.
|
||||
|
||||
+aside-code("Example").
|
||||
matcher = PhraseMatcher(nlp.vocab)
|
||||
assert len(matcher) == 0
|
||||
matcher.add('OBAMA', None, nlp(u"Barack Obama"))
|
||||
assert len(matcher) == 1
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The number of rules.
|
||||
|
||||
+h(2, "contains") PhraseMatcher.__contains__
|
||||
+tag method
|
||||
|
||||
p Check whether the matcher contains rules for a match ID.
|
||||
|
||||
+aside-code("Example").
|
||||
matcher = PhraseMatcher(nlp.vocab)
|
||||
assert len(matcher) == 0
|
||||
matcher.add('OBAMA', None, nlp(u"Barack Obama"))
|
||||
assert len(matcher) == 1
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code key]
|
||||
+cell unicode
|
||||
+cell The match ID.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell Whether the matcher contains rules for this match ID.
|
||||
|
||||
+h(2, "add") PhraseMatcher.add
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Add a rule to the matcher, consisting of an ID key, one or more patterns, and
|
||||
| a callback function to act on the matches. The callback function will
|
||||
| receive the arguments #[code matcher], #[code doc], #[code i] and
|
||||
| #[code matches]. If a pattern already exists for the given ID, the
|
||||
| patterns will be extended. An #[code on_match] callback will be
|
||||
| overwritten.
|
||||
|
||||
+aside-code("Example").
|
||||
def on_match(matcher, doc, id, matches):
|
||||
print('Matched!', matches)
|
||||
|
||||
matcher = PhraseMatcher(nlp.vocab)
|
||||
matcher.add('OBAMA', on_match, nlp(u"Barack Obama"))
|
||||
matcher.add('HEALTH', on_match, nlp(u"health care reform"),
|
||||
nlp(u"healthcare reform"))
|
||||
doc = nlp(u"Barack Obama urges Congress to find courage to defend his healthcare reforms")
|
||||
matches = matcher(doc)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code match_id]
|
||||
+cell unicode
|
||||
+cell An ID for the thing you're matching.
|
||||
|
||||
+row
|
||||
+cell #[code on_match]
|
||||
+cell callable or #[code None]
|
||||
+cell
|
||||
| Callback function to act on matches. Takes the arguments
|
||||
| #[code matcher], #[code doc], #[code i] and #[code matches].
|
||||
|
||||
+row
|
||||
+cell #[code *docs]
|
||||
+cell list
|
||||
+cell
|
||||
| #[code Doc] objects of the phrases to match.
|
390
website/api/pipe.jade
Normal file
390
website/api/pipe.jade
Normal file
|
@ -0,0 +1,390 @@
|
|||
//- 💫 DOCS > API > PIPE
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
//- This page can be used as a template for all other classes that inherit
|
||||
//- from `Pipe`.
|
||||
|
||||
if subclass
|
||||
+infobox
|
||||
| This class is a subclass of #[+api("pipe") #[code Pipe]] and
|
||||
| follows the same API. The pipeline component is available in the
|
||||
| #[+a("/usage/processing-pipelines") processing pipeline] via the ID
|
||||
| #[code "#{pipeline_id}"].
|
||||
|
||||
else
|
||||
p
|
||||
| This class is not instantiated directly. Components inherit from it,
|
||||
| and it defines the interface that components should follow to
|
||||
| function as components in a spaCy analysis pipeline.
|
||||
|
||||
- CLASSNAME = subclass || 'Pipe'
|
||||
- VARNAME = short || CLASSNAME.toLowerCase()
|
||||
|
||||
|
||||
+h(2, "model") #{CLASSNAME}.Model
|
||||
+tag classmethod
|
||||
|
||||
p
|
||||
| Initialise a model for the pipe. The model should implement the
|
||||
| #[code thinc.neural.Model] API. Wrappers are available for
|
||||
| #[+a("/usage/deep-learning") most major machine learning libraries].
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code **kwargs]
|
||||
+cell -
|
||||
+cell Parameters for initialising the model
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell object
|
||||
+cell The initialised model.
|
||||
|
||||
+h(2, "init") #{CLASSNAME}.__init__
|
||||
+tag method
|
||||
|
||||
p Create a new pipeline instance.
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.pipeline import #{CLASSNAME}
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code vocab]
|
||||
+cell #[code Vocab]
|
||||
+cell The shared vocabulary.
|
||||
|
||||
+row
|
||||
+cell #[code model]
|
||||
+cell #[code thinc.neural.Model] or #[code True]
|
||||
+cell
|
||||
| The model powering the pipeline component. If no model is
|
||||
| supplied, the model is created when you call
|
||||
| #[code begin_training], #[code from_disk] or #[code from_bytes].
|
||||
|
||||
+row
|
||||
+cell #[code **cfg]
|
||||
+cell -
|
||||
+cell Configuration parameters.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code=CLASSNAME]
|
||||
+cell The newly constructed object.
|
||||
|
||||
+h(2, "call") #{CLASSNAME}.__call__
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Apply the pipe to one document. The document is modified in place, and
|
||||
| returned. Both #[code #{CLASSNAME}.__call__] and
|
||||
| #[code #{CLASSNAME}.pipe] should delegate to the
|
||||
| #[code #{CLASSNAME}.predict] and #[code #{CLASSNAME}.set_annotations]
|
||||
| methods.
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
doc = nlp(u"This is a sentence.")
|
||||
processed = #{VARNAME}(doc)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code doc]
|
||||
+cell #[code Doc]
|
||||
+cell The document to process.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Doc]
|
||||
+cell The processed document.
|
||||
|
||||
+h(2, "pipe") #{CLASSNAME}.pipe
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Apply the pipe to a stream of documents. Both
|
||||
| #[code #{CLASSNAME}.__call__] and #[code #{CLASSNAME}.pipe] should
|
||||
| delegate to the #[code #{CLASSNAME}.predict] and
|
||||
| #[code #{CLASSNAME}.set_annotations] methods.
|
||||
|
||||
+aside-code("Example").
|
||||
texts = [u'One doc', u'...', u'Lots of docs']
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
for doc in #{VARNAME}.pipe(texts, batch_size=50):
|
||||
pass
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code stream]
|
||||
+cell iterable
|
||||
+cell A stream of documents.
|
||||
|
||||
+row
|
||||
+cell #[code batch_size]
|
||||
+cell int
|
||||
+cell The number of texts to buffer. Defaults to #[code 128].
|
||||
|
||||
+row
|
||||
+cell #[code n_threads]
|
||||
+cell int
|
||||
+cell
|
||||
| The number of worker threads to use. If #[code -1], OpenMP will
|
||||
| decide how many to use at run time. Default is #[code -1].
|
||||
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Doc]
|
||||
+cell Processed documents in the order of the original text.
|
||||
|
||||
+h(2, "predict") #{CLASSNAME}.predict
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Apply the pipeline's model to a batch of docs, without modifying them.
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
scores = #{VARNAME}.predict([doc1, doc2])
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code docs]
|
||||
+cell iterable
|
||||
+cell The documents to predict.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell -
|
||||
+cell Scores from the model.
|
||||
|
||||
+h(2, "set_annotations") #{CLASSNAME}.set_annotations
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Modify a batch of documents, using pre-computed scores.
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
scores = #{VARNAME}.predict([doc1, doc2])
|
||||
#{VARNAME}.set_annotations([doc1, doc2], scores)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code docs]
|
||||
+cell iterable
|
||||
+cell The documents to modify.
|
||||
|
||||
+row
|
||||
+cell #[code scores]
|
||||
+cell -
|
||||
+cell The scores to set, produced by #[code #{CLASSNAME}.predict].
|
||||
|
||||
+h(2, "update") #{CLASSNAME}.update
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Learn from a batch of documents and gold-standard information, updating
|
||||
| the pipe's model. Delegates to #[code #{CLASSNAME}.predict] and
|
||||
| #[code #{CLASSNAME}.get_loss].
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
losses = {}
|
||||
optimizer = nlp.begin_training()
|
||||
#{VARNAME}.update([doc1, doc2], [gold1, gold2], losses=losses, sgd=optimizer)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code docs]
|
||||
+cell iterable
|
||||
+cell A batch of documents to learn from.
|
||||
|
||||
+row
|
||||
+cell #[code golds]
|
||||
+cell iterable
|
||||
+cell The gold-standard data. Must have the same length as #[code docs].
|
||||
|
||||
+row
|
||||
+cell #[code drop]
|
||||
+cell int
|
||||
+cell The dropout rate.
|
||||
|
||||
+row
|
||||
+cell #[code sgd]
|
||||
+cell callable
|
||||
+cell
|
||||
| The optimizer. Should take two arguments #[code weights] and
|
||||
| #[code gradient], and an optional ID.
|
||||
|
||||
+row
|
||||
+cell #[code losses]
|
||||
+cell dict
|
||||
+cell
|
||||
| Optional record of the loss during training. The value keyed by
|
||||
| the model's name is updated.
|
||||
|
||||
+h(2, "get_loss") #{CLASSNAME}.get_loss
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Find the loss and gradient of loss for the batch of documents and their
|
||||
| predicted scores.
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
scores = #{VARNAME}.predict([doc1, doc2])
|
||||
loss, d_loss = #{VARNAME}.get_loss([doc1, doc2], [gold1, gold2], scores)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code docs]
|
||||
+cell iterable
|
||||
+cell The batch of documents.
|
||||
|
||||
+row
|
||||
+cell #[code golds]
|
||||
+cell iterable
|
||||
+cell The gold-standard data. Must have the same length as #[code docs].
|
||||
|
||||
+row
|
||||
+cell #[code scores]
|
||||
+cell -
|
||||
+cell Scores representing the model's predictions.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell tuple
|
||||
+cell The loss and the gradient, i.e. #[code (loss, gradient)].
|
||||
|
||||
+h(2, "begin_training") #{CLASSNAME}.begin_training
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Initialize the pipe for training, using data examples if available. If no
|
||||
| model has been initialized yet, the model is added.
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
nlp.pipeline.append(#{VARNAME})
|
||||
#{VARNAME}.begin_training(pipeline=nlp.pipeline)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code gold_tuples]
|
||||
+cell iterable
|
||||
+cell
|
||||
| Optional gold-standard annotations from which to construct
|
||||
| #[+api("goldparse") #[code GoldParse]] objects.
|
||||
|
||||
+row
|
||||
+cell #[code pipeline]
|
||||
+cell list
|
||||
+cell
|
||||
| Optional list of #[+api("pipe") #[code Pipe]] components that
|
||||
| this component is part of.
|
||||
|
||||
+h(2, "use_params") #{CLASSNAME}.use_params
|
||||
+tag method
|
||||
+tag contextmanager
|
||||
|
||||
p Modify the pipe's model, to use the given parameter values.
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
with #{VARNAME}.use_params():
|
||||
#{VARNAME}.to_disk('/best_model')
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code params]
|
||||
+cell -
|
||||
+cell
|
||||
| The parameter values to use in the model. At the end of the
|
||||
| context, the original parameters are restored.
|
||||
|
||||
+h(2, "to_disk") #{CLASSNAME}.to_disk
|
||||
+tag method
|
||||
|
||||
p Serialize the pipe to disk.
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
#{VARNAME}.to_disk('/path/to/#{VARNAME}')
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code path]
|
||||
+cell unicode or #[code Path]
|
||||
+cell
|
||||
| A path to a directory, which will be created if it doesn't exist.
|
||||
| Paths may be either strings or #[code Path]-like objects.
|
||||
|
||||
+h(2, "from_disk") #{CLASSNAME}.from_disk
|
||||
+tag method
|
||||
|
||||
p Load the pipe from disk. Modifies the object in place and returns it.
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
#{VARNAME}.from_disk('/path/to/#{VARNAME}')
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code path]
|
||||
+cell unicode or #[code Path]
|
||||
+cell
|
||||
| A path to a directory. Paths may be either strings or
|
||||
| #[code Path]-like objects.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code=CLASSNAME]
|
||||
+cell The modified #[code=CLASSNAME] object.
|
||||
|
||||
+h(2, "to_bytes") #{CLASSNAME}.to_bytes
|
||||
+tag method
|
||||
|
||||
+aside-code("example").
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
#{VARNAME}_bytes = #{VARNAME}.to_bytes()
|
||||
|
||||
p Serialize the pipe to a bytestring.
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code **exclude]
|
||||
+cell -
|
||||
+cell Named attributes to prevent from being serialized.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bytes
|
||||
+cell The serialized form of the #[code=CLASSNAME] object.
|
||||
|
||||
+h(2, "from_bytes") #{CLASSNAME}.from_bytes
|
||||
+tag method
|
||||
|
||||
p Load the pipe from a bytestring. Modifies the object in place and returns it.
|
||||
|
||||
+aside-code("Example").
|
||||
#{VARNAME}_bytes = #{VARNAME}.to_bytes()
|
||||
#{VARNAME} = #{CLASSNAME}(nlp.vocab)
|
||||
#{VARNAME}.from_bytes(#{VARNAME}_bytes)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code bytes_data]
|
||||
+cell bytes
|
||||
+cell The data to load from.
|
||||
|
||||
+row
|
||||
+cell #[code **exclude]
|
||||
+cell -
|
||||
+cell Named attributes to prevent from being loaded.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code=CLASSNAME]
|
||||
+cell The #[code=CLASSNAME] object.
|
|
@ -1,6 +1,6 @@
|
|||
//- 💫 DOCS > API > SPAN
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p A slice from a #[+api("doc") #[code Doc]] object.
|
||||
|
||||
|
@ -40,7 +40,7 @@ p Create a Span object from the #[code slice doc[start : end]].
|
|||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell A meaning representation of the span.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Span]
|
||||
+cell The newly constructed object.
|
||||
|
@ -61,7 +61,7 @@ p Get a #[code Token] object.
|
|||
+cell int
|
||||
+cell The index of the token within the span.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Token]
|
||||
+cell The token at #[code span[i]].
|
||||
|
@ -79,7 +79,7 @@ p Get a #[code Span] object.
|
|||
+cell tuple
|
||||
+cell The slice of the span to get.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Span]
|
||||
+cell The span at #[code span[start : end]].
|
||||
|
@ -95,7 +95,7 @@ p Iterate over #[code Token] objects.
|
|||
assert [t.text for t in span] == ['it', 'back', '!']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Token]
|
||||
+cell A #[code Token] object.
|
||||
|
@ -111,7 +111,7 @@ p Get the number of tokens in the span.
|
|||
assert len(span) == 3
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The number of tokens in the span.
|
||||
|
@ -140,7 +140,7 @@ p
|
|||
| The object to compare with. By default, accepts #[code Doc],
|
||||
| #[code Span], #[code Token] and #[code Lexeme] objects.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell float
|
||||
+cell A scalar similarity score. Higher is more similar.
|
||||
|
@ -167,7 +167,7 @@ p
|
|||
+cell list
|
||||
+cell A list of attribute ID ints.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code.u-break numpy.ndarray[long, ndim=2]]
|
||||
+cell
|
||||
|
@ -194,7 +194,7 @@ p Retokenize the document, such that the span is merged into a single token.
|
|||
| Attributes to assign to the merged token. By default, attributes
|
||||
| are inherited from the syntactic root token of the span.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Token]
|
||||
+cell The newly merged token.
|
||||
|
@ -216,7 +216,7 @@ p
|
|||
assert new_york.root.text == 'York'
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Token]
|
||||
+cell The root token.
|
||||
|
@ -233,7 +233,7 @@ p Tokens that are to the left of the span, whose head is within the span.
|
|||
assert lefts == [u'New']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Token]
|
||||
+cell A left-child of a token of the span.
|
||||
|
@ -250,7 +250,7 @@ p Tokens that are to the right of the span, whose head is within the span.
|
|||
assert rights == [u'in']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Token]
|
||||
+cell A right-child of a token of the span.
|
||||
|
@ -267,7 +267,7 @@ p Tokens that descend from tokens in the span, but fall outside it.
|
|||
assert subtree == [u'Give', u'it', u'back', u'!']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Token]
|
||||
+cell A descendant of a token within the span.
|
||||
|
@ -285,7 +285,7 @@ p
|
|||
assert doc[1:].has_vector
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the span has a vector data attached.
|
||||
|
@ -304,7 +304,7 @@ p
|
|||
assert doc[1:].vector.shape == (300,)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell A 1D numpy array representing the span's semantics.
|
||||
|
@ -323,7 +323,7 @@ p
|
|||
assert doc[1:].vector_norm != doc[2:].vector_norm
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell float
|
||||
+cell The L2 norm of the vector representation.
|
|
@ -1,6 +1,6 @@
|
|||
//- 💫 DOCS > API > STRINGSTORE
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| Look up strings by 64-bit hashes. As of v2.0, spaCy uses hash values
|
||||
|
@ -23,7 +23,7 @@ p
|
|||
+cell iterable
|
||||
+cell A sequence of unicode strings to add to the store.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code StringStore]
|
||||
+cell The newly constructed object.
|
||||
|
@ -38,7 +38,7 @@ p Get the number of strings in the store.
|
|||
assert len(stringstore) == 2
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The number of strings in the store.
|
||||
|
@ -60,7 +60,7 @@ p Retrieve a string from a given hash, or vice versa.
|
|||
+cell bytes, unicode or uint64
|
||||
+cell The value to encode.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell unicode or int
|
||||
+cell The value to be retrieved.
|
||||
|
@ -81,7 +81,7 @@ p Check whether a string is in the store.
|
|||
+cell unicode
|
||||
+cell The string to check.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the store contains the string.
|
||||
|
@ -100,7 +100,7 @@ p
|
|||
assert all_strings == [u'apple', u'orange']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell unicode
|
||||
+cell A string in the store.
|
||||
|
@ -125,7 +125,7 @@ p Add a string to the #[code StringStore].
|
|||
+cell unicode
|
||||
+cell The string to add.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell uint64
|
||||
+cell The string's hash value.
|
||||
|
@ -166,7 +166,7 @@ p Loads state from a directory. Modifies the object in place and returns it.
|
|||
| A path to a directory. Paths may be either strings or
|
||||
| #[code Path]-like objects.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code StringStore]
|
||||
+cell The modified #[code StringStore] object.
|
||||
|
@ -185,7 +185,7 @@ p Serialize the current state to a binary string.
|
|||
+cell -
|
||||
+cell Named attributes to prevent from being serialized.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bytes
|
||||
+cell The serialized form of the #[code StringStore] object.
|
||||
|
@ -211,7 +211,7 @@ p Load state from a binary string.
|
|||
+cell -
|
||||
+cell Named attributes to prevent from being loaded.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code StringStore]
|
||||
+cell The #[code StringStore] object.
|
||||
|
@ -233,7 +233,7 @@ p Get a 64-bit hash for a given string.
|
|||
+cell unicode
|
||||
+cell The string to hash.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell uint64
|
||||
+cell The hash.
|
5
website/api/tagger.jade
Normal file
5
website/api/tagger.jade
Normal file
|
@ -0,0 +1,5 @@
|
|||
//- 💫 DOCS > API > TAGGER
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
!=partial("pipe", { subclass: "Tagger", pipeline_id: "tagger" })
|
5
website/api/tensorizer.jade
Normal file
5
website/api/tensorizer.jade
Normal file
|
@ -0,0 +1,5 @@
|
|||
//- 💫 DOCS > API > TENSORIZER
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
!=partial("pipe", { subclass: "Tensorizer", pipeline_id: "tensorizer" })
|
19
website/api/textcategorizer.jade
Normal file
19
website/api/textcategorizer.jade
Normal file
|
@ -0,0 +1,19 @@
|
|||
//- 💫 DOCS > API > TEXTCATEGORIZER
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| The model supports classification with multiple, non-mutually exclusive
|
||||
| labels. You can change the model architecture rather easily, but by
|
||||
| default, the #[code TextCategorizer] class uses a convolutional
|
||||
| neural network to assign position-sensitive vectors to each word in the
|
||||
| document. This step is similar to the #[+api("tensorizer") #[code Tensorizer]]
|
||||
| component, but the #[code TextCategorizer] uses its own CNN model, to
|
||||
| avoid sharing weights with the other pipeline components. The document
|
||||
| tensor is then
|
||||
| summarized by concatenating max and mean pooling, and a multilayer
|
||||
| perceptron is used to predict an output vector of length #[code nr_class],
|
||||
| before a logistic activation is applied elementwise. The value of each
|
||||
| output neuron is the probability that some class is present.
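Once a trained #[code TextCategorizer] has run over a document, the per-label scores are available as doc.cats. A minimal sketch (the pipeline, label name and score shown are hypothetical):

    # assumes `nlp` already contains a trained TextCategorizer with a 'POSITIVE' label
    doc = nlp(u'This was a great movie.')
    print(doc.cats)  # e.g. {'POSITIVE': 0.92} -- independent per-label probabilities
    if doc.cats.get('POSITIVE', 0.0) >= 0.5:
        print('predicted: positive')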
|
||||
|
||||
!=partial("pipe", { subclass: "TextCategorizer", short: "textcat", pipeline_id: "textcat" })
|
|
@ -1,6 +1,6 @@
|
|||
//- 💫 DOCS > API > TOKEN
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p An individual token — i.e. a word, punctuation symbol, whitespace, etc.
|
||||
|
||||
|
@ -30,7 +30,7 @@ p Construct a #[code Token] object.
|
|||
+cell int
|
||||
+cell The index of the token within the document.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Token]
|
||||
+cell The newly constructed object.
|
||||
|
@ -46,7 +46,7 @@ p The number of unicode characters in the token, i.e. #[code token.text].
|
|||
assert len(token) == 4
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The number of unicode characters in the token.
|
||||
|
@ -68,7 +68,7 @@ p Check the value of a boolean flag.
|
|||
+cell int
|
||||
+cell The attribute ID of the flag to check.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the flag is set.
|
||||
|
@ -93,7 +93,7 @@ p Compute a semantic similarity estimate. Defaults to cosine over vectors.
|
|||
| The object to compare with. By default, accepts #[code Doc],
|
||||
| #[code Span], #[code Token] and #[code Lexeme] objects.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell float
|
||||
+cell A scalar similarity score. Higher is more similar.
|
||||
|
@ -114,7 +114,7 @@ p Get a neighboring token.
|
|||
+cell int
|
||||
+cell The relative position of the token to get. Defaults to #[code 1].
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Token]
|
||||
+cell The token at position #[code self.doc[self.i+i]].
|
||||
|
@ -139,7 +139,7 @@ p
|
|||
+cell #[code Token]
|
||||
+cell Another token.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether this token is the ancestor of the descendant.
|
||||
|
@ -158,7 +158,7 @@ p The rightmost token of this token's syntactic descendants.
|
|||
assert [t.text for t in he_ancestors] == [u'pleaded']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Token]
|
||||
+cell
|
||||
|
@ -177,7 +177,7 @@ p A sequence of coordinated tokens, including the token itself.
|
|||
assert [t.text for t in apples_conjuncts] == [u'oranges']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Token]
|
||||
+cell A coordinated token.
|
||||
|
@ -194,7 +194,7 @@ p A sequence of the token's immediate syntactic children.
|
|||
assert [t.text for t in give_children] == [u'it', u'back', u'!']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Token]
|
||||
+cell A child token such that #[code child.head==self].
|
||||
|
@ -211,7 +211,7 @@ p A sequence of all the token's syntactic descendents.
|
|||
assert [t.text for t in give_subtree] == [u'Give', u'it', u'back', u'!']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Token]
|
||||
+cell A descendant token such that #[code self.is_ancestor(descendant)].
|
||||
|
@ -230,7 +230,7 @@ p
|
|||
assert apples.has_vector
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the token has a vector data attached.
|
||||
|
@ -248,7 +248,7 @@ p A real-valued meaning representation.
|
|||
assert apples.vector.shape == (300,)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell A 1D numpy array representing the token's semantics.
|
||||
|
@ -268,7 +268,7 @@ p The L2 norm of the token's vector representation.
|
|||
assert apples.vector_norm != pasta.vector_norm
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell float
|
||||
+cell The L2 norm of the vector representation.
|
||||
|
@ -280,20 +280,29 @@ p The L2 norm of the token's vector representation.
|
|||
+cell #[code text]
|
||||
+cell unicode
|
||||
+cell Verbatim text content.
|
||||
|
||||
+row
|
||||
+cell #[code text_with_ws]
|
||||
+cell unicode
|
||||
+cell Text content, with trailing space character if present.
|
||||
|
||||
+row
|
||||
+cell #[code whitespace]
|
||||
+cell int
|
||||
+cell Trailing space character if present.
|
||||
+row
|
||||
+cell #[code whitespace_]
|
||||
+cell unicode
|
||||
+cell Trailing space character if present.
|
||||
|
||||
+row
|
||||
+cell #[code orth]
|
||||
+cell int
|
||||
+cell ID of the verbatim text content.
|
||||
|
||||
+row
|
||||
+cell #[code orth_]
|
||||
+cell unicode
|
||||
+cell
|
||||
| Verbatim text content (identical to #[code Token.text]). Exists
|
||||
| mostly for consistency with the other attributes.
|
||||
|
||||
+row
|
||||
+cell #[code vocab]
|
||||
+cell #[code Vocab]
|
|
@ -1,6 +1,6 @@
|
|||
//- 💫 DOCS > API > TOKENIZER
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| Segment text, and create #[code Doc] objects with the discovered segment
|
||||
|
@ -57,7 +57,7 @@ p Create a #[code Tokenizer], to create #[code Doc] objects given unicode text.
|
|||
+cell callable
|
||||
+cell A boolean function matching strings to be recognised as tokens.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Tokenizer]
|
||||
+cell The newly constructed object.
|
||||
|
@ -77,7 +77,7 @@ p Tokenize a string.
|
|||
+cell unicode
|
||||
+cell The string to tokenize.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Doc]
|
||||
+cell A container for linguistic annotations.
|
||||
|
@ -110,7 +110,7 @@ p Tokenize a stream of texts.
|
|||
| The number of threads to use, if the implementation supports
|
||||
| multi-threading. The default tokenizer is single-threaded.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Doc]
|
||||
+cell A sequence of Doc objects, in order.
|
||||
|
@ -126,7 +126,7 @@ p Find internal split points of the string.
|
|||
+cell unicode
|
||||
+cell The string to split.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell list
|
||||
+cell
|
||||
|
@ -147,7 +147,7 @@ p
|
|||
+cell unicode
|
||||
+cell The string to segment.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The length of the prefix if present, otherwise #[code None].
|
||||
|
@ -165,7 +165,7 @@ p
|
|||
+cell unicode
|
||||
+cell The string to segment.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int / #[code None]
|
||||
+cell The length of the suffix if present, otherwise #[code None].
|
||||
|
@ -176,7 +176,7 @@ p
|
|||
p
|
||||
| Add a special-case tokenization rule. This mechanism is also used to add
|
||||
| custom tokenizer exceptions to the language data. See the usage guide
|
||||
| on #[+a("/docs/usage/adding-languages#tokenizer-exceptions") adding languages]
|
||||
| on #[+a("/usage/adding-languages#tokenizer-exceptions") adding languages]
|
||||
| for more details and examples.
|
||||
|
||||
+aside-code("Example").
|
24
website/api/top-level.jade
Normal file
24
website/api/top-level.jade
Normal file
|
@ -0,0 +1,24 @@
|
|||
//- 💫 DOCS > API > TOP-LEVEL
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
+section("spacy")
|
||||
//-+h(2, "spacy") spaCy
|
||||
//- spacy/__init__.py
|
||||
include _top-level/_spacy
|
||||
|
||||
+section("displacy")
|
||||
+h(2, "displacy", "spacy/displacy") displaCy
|
||||
include _top-level/_displacy
|
||||
|
||||
+section("util")
|
||||
+h(2, "util", "spacy/util.py") Utility functions
|
||||
include _top-level/_util
|
||||
|
||||
+section("compat")
|
||||
+h(2, "compat", "spacy/compaty.py") Compatibility functions
|
||||
include _top-level/_compat
|
||||
|
||||
+section("cli", "spacy/cli")
|
||||
+h(2, "cli") Command line
|
||||
include _top-level/_cli
|
333
website/api/vectors.jade
Normal file
333
website/api/vectors.jade
Normal file
|
@ -0,0 +1,333 @@
|
|||
//- 💫 DOCS > API > VECTORS
|
||||
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| Vectors data is kept in the #[code Vectors.data] attribute, which should
|
||||
| be an instance of #[code numpy.ndarray] (for CPU vectors) or
|
||||
| #[code cupy.ndarray] (for GPU vectors).
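As a minimal sketch of the CPU case, the table can be inspected directly through Vectors.data; the shapes below are illustrative assumptions.

import numpy
from spacy.strings import StringStore
from spacy.vectors import Vectors

vector_table = numpy.zeros((3, 300), dtype='f')  # three rows of 300d vectors
vectors = Vectors(StringStore(), vector_table)
print(type(vectors.data), vectors.data.shape)    # numpy.ndarray, (3, 300)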
|
||||
|
||||
+h(2, "init") Vectors.__init__
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Create a new vector store. To keep the vector table empty, pass
|
||||
| #[code data_or_width=0]. You can also create the vector table and add
|
||||
| vectors one by one, or set the vector values directly on initialisation.
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.vectors import Vectors
|
||||
from spacy.strings import StringStore
|
||||
|
||||
empty_vectors = Vectors(StringStore())
|
||||
|
||||
vectors = Vectors([u'cat'], 300)
|
||||
vectors[u'cat'] = numpy.random.uniform(-1, 1, (300,))
|
||||
|
||||
vector_table = numpy.zeros((3, 300), dtype='f')
|
||||
vectors = Vectors(StringStore(), vector_table)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code strings]
|
||||
+cell #[code StringStore] or list
|
||||
+cell
|
||||
| List of strings, or a #[+api("stringstore") #[code StringStore]]
|
||||
| that maps strings to hash values, and vice versa.
|
||||
|
||||
+row
|
||||
+cell #[code data_or_width]
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']] or int
|
||||
+cell Vector data or number of dimensions.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Vectors]
|
||||
+cell The newly created object.
|
||||
|
||||
+h(2, "getitem") Vectors.__getitem__
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Get a vector by key. If key is a string, it is hashed to an integer ID
|
||||
| using the #[code Vectors.strings] table. If the integer key is not found
|
||||
| in the table, a #[code KeyError] is raised.
|
||||
|
||||
+aside-code("Example").
|
||||
vectors = Vectors(StringStore(), 300)
|
||||
vectors.add(u'cat', numpy.random.uniform(-1, 1, (300,)))
|
||||
cat_vector = vectors[u'cat']
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code key]
|
||||
+cell unicode / int
|
||||
+cell The key to get the vector for.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell The vector for the key.
|
||||
|
||||
+h(2, "setitem") Vectors.__setitem__
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Set a vector for the given key. If key is a string, it is hashed to an
|
||||
| integer ID using the #[code Vectors.strings] table.
|
||||
|
||||
+aside-code("Example").
|
||||
vectors = Vectors(StringStore(), 300)
|
||||
vectors[u'cat'] = numpy.random.uniform(-1, 1, (300,))
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code key]
|
||||
+cell unicode / int
|
||||
+cell The key to set the vector for.
|
||||
|
||||
+row
|
||||
+cell #[code vector]
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell The vector to set.
|
||||
|
||||
+h(2, "iter") Vectors.__iter__
|
||||
+tag method
|
||||
|
||||
p Yield vectors from the table.
|
||||
|
||||
+aside-code("Example").
|
||||
vector_table = numpy.zeros((3, 300), dtype='f')
|
||||
vectors = Vectors(StringStore(), vector_table)
|
||||
for vector in vectors:
|
||||
print(vector)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell A vector from the table.
|
||||
|
||||
+h(2, "len") Vectors.__len__
|
||||
+tag method
|
||||
|
||||
p Return the number of vectors that have been assigned.
|
||||
|
||||
+aside-code("Example").
|
||||
vector_table = numpy.zeros((3, 300), dtype='f')
|
||||
vectors = Vectors(StringStore(), vector_table)
|
||||
assert len(vectors) == 3
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The number of vectors in the data.
|
||||
|
||||
+h(2, "contains") Vectors.__contains__
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Check whether a key has a vector entry in the table. If key is a string,
|
||||
| it is hashed to an integer ID using the #[code Vectors.strings] table.
|
||||
|
||||
+aside-code("Example").
|
||||
vectors = Vectors(StringStore(), 300)
|
||||
vectors.add(u'cat', numpy.random.uniform(-1, 1, (300,)))
|
||||
assert u'cat' in vectors
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code key]
|
||||
+cell unicode / int
|
||||
+cell The key to check.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the key has a vector entry.
|
||||
|
||||
+h(2, "add") Vectors.add
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Add a key to the table, optionally setting a vector value as well. If
|
||||
| key is a string, it is hashed to an integer ID using the
|
||||
| #[code Vectors.strings] table.
|
||||
|
||||
+aside-code("Example").
|
||||
vectors = Vectors(StringStore(), 300)
|
||||
vectors.add(u'cat', numpy.random.uniform(-1, 1, (300,)))
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code key]
|
||||
+cell unicode / int
|
||||
+cell The key to add.
|
||||
|
||||
+row
|
||||
+cell #[code vector]
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell An optional vector to add.
|
||||
|
||||
+h(2, "items") Vectors.items
|
||||
+tag method
|
||||
|
||||
p Iterate over #[code (string key, vector)] pairs, in order.
|
||||
|
||||
+aside-code("Example").
|
||||
vectors = Vectors(StringStore(), 300)
|
||||
vectors.add(u'cat', numpy.random.uniform(-1, 1, (300,)))
|
||||
for key, vector in vectors.items():
|
||||
print(key, vector)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell tuple
|
||||
+cell #[code (string key, vector)] pairs, in order.
|
||||
|
||||
+h(2, "shape") Vectors.shape
|
||||
+tag property
|
||||
|
||||
p
|
||||
| Get a #[code (rows, dims)] tuple of the number of rows and number of
|
||||
| dimensions in the vector table.
|
||||
|
||||
+aside-code("Example").
|
||||
vectors = Vectors(StringStore(), 300)
|
||||
vectors.add(u'cat', numpy.random.uniform(-1, 1, (300,)))
|
||||
rows, dims = vectors.shape
|
||||
assert rows == 1
|
||||
assert dims == 300
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell tuple
|
||||
+cell A #[code (rows, dims)] tuple.
|
||||
|
||||
+h(2, "from_glove") Vectors.from_glove
|
||||
+tag method
|
||||
|
||||
p
|
||||
| Load #[+a("https://nlp.stanford.edu/projects/glove/") GloVe] vectors from
|
||||
| a directory. Assumes binary format, that the vocab is in a
|
||||
| #[code vocab.txt], and that vectors are named
|
||||
| #[code vectors.{size}.[fd].bin], e.g. #[code vectors.128.f.bin] for 128d
|
||||
| float32 vectors, #[code vectors.300.d.bin] for 300d float64 (double)
|
||||
| vectors, etc. By default GloVe outputs 64-bit vectors.
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code path]
|
||||
+cell unicode / #[code Path]
|
||||
+cell The path to load the GloVe vectors from.
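A sketch of the expected call, assuming a hypothetical directory /path/to/glove that contains vocab.txt and vectors.300.d.bin as described above:

from spacy.strings import StringStore
from spacy.vectors import Vectors

vectors = Vectors(StringStore())
vectors.from_glove('/path/to/glove')  # hypothetical path; loads the table in place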
|
||||
|
||||
+h(2, "to_disk") Vectors.to_disk
|
||||
+tag method
|
||||
|
||||
p Save the current state to a directory.
|
||||
|
||||
+aside-code("Example").
|
||||
vectors.to_disk('/path/to/vectors')
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code path]
|
||||
+cell unicode or #[code Path]
|
||||
+cell
|
||||
| A path to a directory, which will be created if it doesn't exist.
|
||||
| Paths may be either strings or #[code Path]-like objects.
|
||||
|
||||
+h(2, "from_disk") Vectors.from_disk
|
||||
+tag method
|
||||
|
||||
p Loads state from a directory. Modifies the object in place and returns it.
|
||||
|
||||
+aside-code("Example").
|
||||
vectors = Vectors(StringStore())
|
||||
vectors.from_disk('/path/to/vectors')
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code path]
|
||||
+cell unicode or #[code Path]
|
||||
+cell
|
||||
| A path to a directory. Paths may be either strings or
|
||||
| #[code Path]-like objects.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Vectors]
|
||||
+cell The modified #[code Vectors] object.
|
||||
|
||||
+h(2, "to_bytes") Vectors.to_bytes
|
||||
+tag method
|
||||
|
||||
p Serialize the current state to a binary string.
|
||||
|
||||
+aside-code("Example").
|
||||
vectors_bytes = vectors.to_bytes()
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code **exclude]
|
||||
+cell -
|
||||
+cell Named attributes to prevent from being serialized.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bytes
|
||||
+cell The serialized form of the #[code Vectors] object.
|
||||
|
||||
+h(2, "from_bytes") Vectors.from_bytes
|
||||
+tag method
|
||||
|
||||
p Load state from a binary string.
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.vectors import Vectors
|
||||
vectors_bytes = vectors.to_bytes()
|
||||
new_vectors = Vectors(StringStore())
|
||||
new_vectors.from_bytes(vectors_bytes)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code bytes_data]
|
||||
+cell bytes
|
||||
+cell The data to load from.
|
||||
|
||||
+row
|
||||
+cell #[code **exclude]
|
||||
+cell -
|
||||
+cell Named attributes to prevent from being loaded.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Vectors]
|
||||
+cell The #[code Vectors] object.
|
||||
|
||||
+h(2, "attributes") Attributes
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code data]
|
||||
+cell #[code numpy.ndarray] / #[code cupy.ndarray]
|
||||
+cell
|
||||
| Stored vectors data. #[code numpy] is used for CPU vectors,
|
||||
| #[code cupy] for GPU vectors.
|
||||
|
||||
+row
|
||||
+cell #[code key2row]
|
||||
+cell dict
|
||||
+cell
|
||||
| Dictionary mapping word hashes to rows in the
|
||||
| #[code Vectors.data] table.
|
||||
|
||||
+row
|
||||
+cell #[code keys]
|
||||
+cell #[code numpy.ndarray]
|
||||
+cell
|
||||
| Array keeping the keys in order, such that
|
||||
| #[code keys[vectors.key2row[key]] == key]
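A short sketch of the indirection these attributes describe, assuming a vectors table that already has an entry for 'cat' (added via Vectors.add) and that the strings table maps the string to its hash, as described under __getitem__:

key = vectors.strings[u'cat']   # hash value for the string key
row = vectors.key2row[key]      # row index into vectors.data
assert (vectors.data[row] == vectors[u'cat']).all()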
|
|
@ -1,17 +1,22 @@
|
|||
//- 💫 DOCS > API > VOCAB
|
||||
|
||||
include ../../_includes/_mixins
|
||||
include ../_includes/_mixins
|
||||
|
||||
p
|
||||
| A lookup table that allows you to access #[code Lexeme] objects. The
|
||||
| #[code Vocab] instance also provides access to the #[code StringStore],
|
||||
| and owns underlying C-data that is shared between #[code Doc] objects.
|
||||
| The #[code Vocab] object provides a lookup table that allows you to
|
||||
| access #[+api("lexeme") #[code Lexeme]] objects, as well as the
|
||||
| #[+api("stringstore") #[code StringStore]]. It also owns underlying
|
||||
| C-data that is shared between #[code Doc] objects.
|
||||
|
||||
+h(2, "init") Vocab.__init__
|
||||
+tag method
|
||||
|
||||
p Create the vocabulary.
|
||||
|
||||
+aside-code("Example").
|
||||
from spacy.vocab import Vocab
|
||||
vocab = Vocab(strings=[u'hello', u'world'])
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code lex_attr_getters]
|
||||
|
@ -39,7 +44,7 @@ p Create the vocabulary.
|
|||
| A #[+api("stringstore") #[code StringStore]] that maps
|
||||
| strings to hash values, and vice versa, or a list of strings.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Vocab]
|
||||
+cell The newly constructed object.
|
||||
|
@ -54,7 +59,7 @@ p Get the current number of lexemes in the vocabulary.
|
|||
assert len(nlp.vocab) > 0
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The number of lexemes in the vocabulary.
|
||||
|
@ -76,7 +81,7 @@ p
|
|||
+cell int / unicode
|
||||
+cell The hash value of a word, or its unicode string.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Lexeme]
|
||||
+cell The lexeme indicated by the given ID.
|
||||
|
@ -90,7 +95,7 @@ p Iterate over the lexemes in the vocabulary.
|
|||
stop_words = (lex for lex in nlp.vocab if lex.is_stop)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell yields
|
||||
+cell #[code Lexeme]
|
||||
+cell An entry in the vocabulary.
|
||||
|
@ -115,7 +120,7 @@ p
|
|||
+cell unicode
|
||||
+cell The ID string.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the string has an entry in the vocabulary.
|
||||
|
@ -152,11 +157,100 @@ p
|
|||
| which the flag will be stored. If #[code -1], the lowest
|
||||
| available bit will be chosen.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell int
|
||||
+cell The integer ID by which the flag value can be checked.
|
||||
|
||||
+h(2, "add_flag") Vocab.clear_vectors
|
||||
+tag method
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| Drop the current vector table. Because all vectors must be the same
|
||||
| width, you have to call this to change the size of the vectors.
|
||||
|
||||
+aside-code("Example").
|
||||
nlp.vocab.clear_vectors(new_dim=300)
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code new_dim]
|
||||
+cell int
|
||||
+cell
|
||||
| Number of dimensions of the new vectors. If #[code None], size
|
||||
| is not changed.
|
||||
|
||||
+h(2, "add_flag") Vocab.get_vector
|
||||
+tag method
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| Retrieve a vector for a word in the vocabulary. Words can be looked up
|
||||
| by string or hash value. If no vectors data is loaded, a
|
||||
| #[code ValueError] is raised.
|
||||
|
||||
+aside-code("Example").
|
||||
nlp.vocab.get_vector(u'apple')
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code orth]
|
||||
+cell int / unicode
|
||||
+cell The hash value of a word, or its unicode string.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell
|
||||
| A word vector. Size and shape are determined by the
|
||||
| #[code Vocab.vectors] instance.
|
||||
|
||||
+h(2, "add_flag") Vocab.set_vector
|
||||
+tag method
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| Set a vector for a word in the vocabulary. Words can be referenced by
|
||||
| string or hash value.
|
||||
|
||||
+aside-code("Example").
|
||||
nlp.vocab.set_vector(u'apple', array([...]))
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code orth]
|
||||
+cell int / unicode
|
||||
+cell The hash value of a word, or its unicode string.
|
||||
|
||||
+row
|
||||
+cell #[code vector]
|
||||
+cell #[code.u-break numpy.ndarray[ndim=1, dtype='float32']]
|
||||
+cell The vector to set.
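A roundtrip sketch combining the vector methods above; it assumes an nlp object whose vocab holds 300-dimensional vectors, e.g. after nlp.vocab.clear_vectors(new_dim=300).

import numpy

vec = numpy.random.uniform(-1, 1, (300,)).astype('float32')
nlp.vocab.set_vector(u'apple', vec)
assert nlp.vocab.has_vector(u'apple')
assert nlp.vocab.get_vector(u'apple').shape == (300,)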
|
||||
|
||||
+h(2, "add_flag") Vocab.has_vector
|
||||
+tag method
|
||||
+tag-new(2)
|
||||
|
||||
p
|
||||
| Check whether a word has a vector. Returns #[code False] if no vectors
|
||||
| are loaded. Words can be looked up by string or hash value.
|
||||
|
||||
+aside-code("Example").
|
||||
if nlp.vocab.has_vector(u'apple'):
|
||||
vector = nlp.vocab.get_vector(u'apple')
|
||||
|
||||
+table(["Name", "Type", "Description"])
|
||||
+row
|
||||
+cell #[code orth]
|
||||
+cell int / unicode
|
||||
+cell The hash value of a word, or its unicode string.
|
||||
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bool
|
||||
+cell Whether the word has a vector.
|
||||
|
||||
+h(2, "to_disk") Vocab.to_disk
|
||||
+tag method
|
||||
+tag-new(2)
|
||||
|
@ -192,7 +286,7 @@ p Loads state from a directory. Modifies the object in place and returns it.
|
|||
| A path to a directory. Paths may be either strings or
|
||||
| #[code Path]-like objects.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Vocab]
|
||||
+cell The modified #[code Vocab] object.
|
||||
|
@ -211,7 +305,7 @@ p Serialize the current state to a binary string.
|
|||
+cell -
|
||||
+cell Named attributes to prevent from being serialized.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell bytes
|
||||
+cell The serialized form of the #[code Vocab] object.
|
||||
|
@ -238,7 +332,7 @@ p Load state from a binary string.
|
|||
+cell -
|
||||
+cell Named attributes to prevent from being loaded.
|
||||
|
||||
+footrow
|
||||
+row("foot")
|
||||
+cell returns
|
||||
+cell #[code Vocab]
|
||||
+cell The #[code Vocab] object.
|
||||
|
@ -256,3 +350,14 @@ p Load state from a binary string.
|
|||
+cell #[code strings]
|
||||
+cell #[code StringStore]
|
||||
+cell A table managing the string-to-int mapping.
|
||||
|
||||
+row
|
||||
+cell #[code vectors]
|
||||
+tag-new(2)
|
||||
+cell #[code Vectors]
|
||||
+cell A table associating word IDs to word vectors.
|
||||
|
||||
+row
|
||||
+cell #[code vectors_length]
|
||||
+cell int
|
||||
+cell Number of dimensions for each word vector.
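As a quick illustration of these attributes (assuming a pipeline with vectors is loaded as nlp):

print(nlp.vocab.vectors_length)  # e.g. 300
print(len(nlp.vocab.vectors))    # number of entries in the vector table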
|
|
@ -19,3 +19,10 @@
|
|||
|
||||
to
|
||||
transform: translate3d(0, 0, 0)
|
||||
|
||||
|
||||
//- Element rotates
|
||||
|
||||
@keyframes rotate
|
||||
to
|
||||
transform: rotate(360deg)
|
||||
|
|
|
@ -1,41 +1,27 @@
|
|||
//- 💫 CSS > BASE > FONTS
|
||||
|
||||
// Source Sans Pro
|
||||
// HK Grotesk
|
||||
|
||||
@font-face
|
||||
font-family: "Source Sans Pro"
|
||||
font-family: "HK Grotesk"
|
||||
font-style: normal
|
||||
font-weight: 400
|
||||
src: url("/assets/fonts/sourcesanspro-regular.eot")
|
||||
src: url("/assets/fonts/sourcesanspro-regular.eot?#iefix") format("embedded-opentype"), url("/assets/fonts/sourcesanspro-regular.woff2") format("woff2"), url("/assets/fonts/sourcesanspro-regular.woff") format("woff"), url("/assets/fonts/sourcesanspro-regular.ttf") format("truetype"), url("/assets/fonts/sourcesanspro-regular.svg#source_sans_proregular") format("svg")
|
||||
font-weight: 500
|
||||
src: url("/assets/fonts/hkgrotesk-semibold.woff2") format("woff2"), url("/assets/fonts/hkgrotesk-semibold.woff") format("woff")
|
||||
|
||||
@font-face
|
||||
font-family: "Source Sans Pro"
|
||||
font-family: "HK Grotesk"
|
||||
font-style: italic
|
||||
font-weight: 400
|
||||
src: url("/assets/fonts/sourcesanspro-italic.eot")
|
||||
src: url("/assets/fonts/sourcesanspro-italic.eot?#iefix") format("embedded-opentype"), url("/assets/fonts/sourcesanspro-italic.woff2") format("woff2"), url("/assets/fonts/sourcesanspro-italic.woff") format("woff"), url("/assets/fonts/sourcesanspro-italic.ttf") format("truetype"), url("/assets/fonts/sourcesanspro-italic.svg#source_sans_proitalic") format("svg")
|
||||
font-weight: 500
|
||||
src: url("/assets/fonts/hkgrotesk-semibolditalic.woff2") format("woff2"), url("/assets/fonts/hkgrotesk-semibolditalic.woff") format("woff")
|
||||
|
||||
@font-face
|
||||
font-family: "Source Sans Pro"
|
||||
font-style: normal
|
||||
font-weight: 700
|
||||
src: url("/assets/fonts/sourcesanspro-bold.eot")
|
||||
src: url("/assets/fonts/sourcesanspro-bold.eot?#iefix") format("embedded-opentype"), url("/assets/fonts/sourcesanspro-bold.woff2") format("woff2"), url("/assets/fonts/sourcesanspro-bold.woff") format("woff"), url("/assets/fonts/sourcesanspro-bold.ttf") format("truetype"), url("/assets/fonts/sourcesanspro-bold.svg#source_sans_probold") format("svg")
|
||||
|
||||
@font-face
|
||||
font-family: "Source Sans Pro"
|
||||
font-style: italic
|
||||
font-weight: 700
|
||||
src: url("/assets/fonts/sourcesanspro-bolditalic.eot")
|
||||
src: url("/assets/fonts/sourcesanspro-bolditalic.eot?#iefix") format("embedded-opentype"), url("/assets/fonts/sourcesanspro-bolditalic.woff2") format("woff2"), url("/assets/fonts/sourcesanspro-bolditalic.woff") format("woff"), url("/assets/fonts/sourcesanspro-bolditalic.ttf") format("truetype"), url("/assets/fonts/sourcesanspro-bolditalic.svg#source_sans_probold_italic") format("svg")
|
||||
|
||||
|
||||
// Source Code Pro
|
||||
|
||||
@font-face
|
||||
font-family: "Source Code Pro"
|
||||
font-family: "HK Grotesk"
|
||||
font-style: normal
|
||||
font-weight: 600
|
||||
src: url("/assets/fonts/sourcecodepro-semibold.eot")
|
||||
src: url("/assets/fonts/sourcecodepro-semibold.eot?#iefix") format("embedded-opentype"), url("/assets/fonts/sourcecodepro-semibold.woff") format("woff"), url("/assets/fonts/sourcecodepro-semibold.ttf") format("truetype"), url("/assets/fonts/sourcecodepro-semibold.svg#sourcecodepro_semibold") format("svg")
|
||||
src: url("/assets/fonts/hkgrotesk-bold.woff2") format("woff2"), url("/assets/fonts/hkgrotesk-bold.woff") format("woff")
|
||||
|
||||
@font-face
|
||||
font-family: "HK Grotesk"
|
||||
font-style: italic
|
||||
font-weight: 600
|
||||
src: url("/assets/fonts/hkgrotesk-bolditalic.woff2") format("woff2"), url("/assets/fonts/hkgrotesk-bolditalic.woff") format("woff")
|
||||
|
|
|
@ -15,6 +15,15 @@
|
|||
align-items: center
|
||||
justify-content: center
|
||||
|
||||
&.o-grid--vcenter
|
||||
align-items: center
|
||||
|
||||
&.o-grid--space
|
||||
justify-content: space-between
|
||||
|
||||
&.o-grid--nowrap
|
||||
flex-wrap: nowrap
|
||||
|
||||
|
||||
//- Grid column
|
||||
|
||||
|
@ -22,7 +31,6 @@
|
|||
$grid-gutter: 2rem
|
||||
|
||||
margin-top: $grid-gutter
|
||||
overflow: hidden
|
||||
|
||||
@include breakpoint(min, lg)
|
||||
display: flex
|
||||
|
|
|
@ -12,6 +12,7 @@ body
|
|||
animation: fadeIn 0.25s ease
|
||||
background: $color-back
|
||||
color: $color-front
|
||||
//scroll-behavior: smooth
|
||||
|
||||
|
||||
//- Paragraphs
|
||||
|
@ -19,6 +20,9 @@ body
|
|||
p
|
||||
@extend .o-block, .u-text
|
||||
|
||||
p:empty
|
||||
margin-bottom: 0
|
||||
|
||||
|
||||
//- Links
|
||||
|
||||
|
|
|
@ -43,12 +43,25 @@
|
|||
position: relative
|
||||
padding: 2.5rem 0
|
||||
overflow: auto
|
||||
background: $color-subtle-light
|
||||
|
||||
.o-main &
|
||||
border-top-left-radius: $border-radius
|
||||
|
||||
|
||||
//- Blocks
|
||||
|
||||
.o-section
|
||||
width: 100%
|
||||
max-width: 100%
|
||||
|
||||
&:not(:last-child)
|
||||
margin-bottom: 7rem
|
||||
padding-bottom: 4rem
|
||||
border-bottom: 1px dotted $color-subtle
|
||||
|
||||
.o-block
|
||||
margin-bottom: 3rem
|
||||
margin-bottom: 4rem
|
||||
|
||||
.o-block-small
|
||||
margin-bottom: 2rem
|
||||
|
@ -58,17 +71,18 @@
|
|||
|
||||
.o-card
|
||||
background: $color-back
|
||||
border-radius: 2px
|
||||
border: 1px solid $color-subtle
|
||||
padding: 3rem 2.5%
|
||||
|
||||
border-radius: $border-radius
|
||||
box-shadow: $box-shadow
|
||||
|
||||
//- Box
|
||||
|
||||
.o-box
|
||||
background: $color-theme-light
|
||||
background: $color-subtle-light
|
||||
padding: 2rem
|
||||
border-left: 4px solid $color-theme
|
||||
border-radius: $border-radius
|
||||
|
||||
.o-box__logos
|
||||
padding-bottom: 1rem
|
||||
|
||||
|
||||
//- Icons
|
||||
|
@ -77,7 +91,14 @@
|
|||
vertical-align: middle
|
||||
|
||||
&.o-icon--inline
|
||||
margin: 0 0.5rem 0 0.25rem
|
||||
margin: 0 0.5rem 0 0.1rem
|
||||
|
||||
.o-emoji
|
||||
margin-right: 0.75rem
|
||||
vertical-align: text-bottom
|
||||
|
||||
.o-badge
|
||||
border-radius: 1em
|
||||
|
||||
|
||||
//- SVG
|
||||
|
@ -102,3 +123,45 @@
|
|||
fill: currentColor
|
||||
vertical-align: middle
|
||||
margin: 0 0.5rem
|
||||
|
||||
|
||||
//- Embeds
|
||||
|
||||
.o-chart
|
||||
max-width: 100%
|
||||
|
||||
.cp_embed_iframe
|
||||
border: 1px solid $color-subtle
|
||||
border-radius: $border-radius
|
||||
|
||||
|
||||
//- Form fields
|
||||
|
||||
.o-field
|
||||
background: $color-back
|
||||
padding: 0 0.25em
|
||||
border-radius: 2em
|
||||
border: 1px solid $color-subtle
|
||||
margin-bottom: 0.25rem
|
||||
|
||||
.o-field__input,
|
||||
.o-field__button
|
||||
padding: 0 0.35em
|
||||
|
||||
.o-field__input
|
||||
width: 100%
|
||||
|
||||
.o-field__select
|
||||
background: transparent
|
||||
color: $color-dark
|
||||
height: 1.4em
|
||||
border: none
|
||||
text-align-last: center
|
||||
|
||||
.o-empty:empty:before
|
||||
@include size(1em)
|
||||
border-radius: 50%
|
||||
content: ""
|
||||
display: inline-block
|
||||
background: $color-red
|
||||
vertical-align: middle
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//- 💫 CSS > BASE > RESET
|
||||
|
||||
*
|
||||
*, *:before, *:after
|
||||
box-sizing: border-box
|
||||
padding: 0
|
||||
margin: 0
|
||||
|
@ -94,7 +94,10 @@ ul, ol
|
|||
|
||||
input, button
|
||||
appearance: none
|
||||
background: transparent
|
||||
|
||||
button
|
||||
background: transparent
|
||||
cursor: pointer
|
||||
|
||||
progress
|
||||
appearance: none
|
||||
|
|
|
@ -2,38 +2,53 @@
|
|||
|
||||
//- Text
|
||||
|
||||
.u-text,
|
||||
.u-text-small,
|
||||
.u-text-tiny
|
||||
font-family: $font-primary
|
||||
|
||||
.u-text
|
||||
font: 1.5rem/#{1.55} $font-primary
|
||||
font-size: 1.35rem
|
||||
line-height: 1.5
|
||||
|
||||
.u-text-small
|
||||
font: 1.4rem/#{1.375} $font-primary
|
||||
font-size: 1.3rem
|
||||
line-height: 1.375
|
||||
|
||||
.u-text-tiny
|
||||
font: 1.1rem/#{1.375} $font-primary
|
||||
|
||||
font-size: 1.1rem
|
||||
line-height: 1.375
|
||||
|
||||
//- Labels & Tags
|
||||
|
||||
.u-text-label
|
||||
font: normal 600 1.4rem/#{1.5} $font-code
|
||||
font: normal 600 1.4rem/#{1.5} $font-secondary
|
||||
text-transform: uppercase
|
||||
|
||||
&.u-text-label--light,
|
||||
&.u-text-label--dark
|
||||
display: inline-block
|
||||
border-radius: 1em
|
||||
padding: 0 1rem 0.15rem
|
||||
|
||||
&.u-text-label--dark
|
||||
background: $color-dark
|
||||
box-shadow: inset 1px 1px 1px rgba($color-front, 0.25)
|
||||
color: $color-back
|
||||
padding: 0 0.75rem
|
||||
margin: 1.5rem 0 0 2rem
|
||||
border-radius: 2px
|
||||
|
||||
&.u-text-label--light
|
||||
background: $color-back
|
||||
color: $color-theme
|
||||
margin-bottom: 1rem
|
||||
|
||||
.u-text-tag
|
||||
display: inline-block
|
||||
font: 600 1.1rem/#{1} $font-code
|
||||
font: 600 1.1rem/#{1} $font-secondary
|
||||
background: $color-theme
|
||||
color: $color-back
|
||||
padding: 0.15em 0.25em
|
||||
border-radius: 2px
|
||||
padding: 0.15em 0.5em 0.35em
|
||||
border-radius: 1em
|
||||
text-transform: uppercase
|
||||
vertical-align: middle
|
||||
|
||||
|
@ -45,7 +60,7 @@
|
|||
//- Headings
|
||||
|
||||
.u-heading
|
||||
margin-bottom: 2rem
|
||||
margin-bottom: 1em
|
||||
|
||||
@include breakpoint(max, md)
|
||||
word-wrap: break-word
|
||||
|
@ -53,12 +68,29 @@
|
|||
&:not(:first-child)
|
||||
padding-top: 3.5rem
|
||||
|
||||
&.u-heading--title:after
|
||||
content: ""
|
||||
display: block
|
||||
width: 10%
|
||||
min-width: 6rem
|
||||
height: 6px
|
||||
background: $color-theme
|
||||
margin-top: 3rem
|
||||
|
||||
.u-heading-0
|
||||
font: normal bold 7rem/#{1} $font-primary
|
||||
font: normal 600 7rem/#{1} $font-secondary
|
||||
|
||||
@include breakpoint(max, sm)
|
||||
font-size: 6rem
|
||||
|
||||
|
||||
@each $level, $size in $headings
|
||||
.u-heading-#{$level}
|
||||
font: normal bold #{$size}rem/#{1.25} $font-primary
|
||||
font: normal 500 #{$size}rem/#{1.1} $font-secondary
|
||||
|
||||
.u-heading__teaser
|
||||
margin-top: 2rem
|
||||
font-weight: normal
|
||||
|
||||
|
||||
//- Links
|
||||
|
@ -66,31 +98,59 @@
|
|||
.u-link
|
||||
color: $color-theme
|
||||
border-bottom: 1px solid
|
||||
transition: color 0.2s ease
|
||||
|
||||
&:hover
|
||||
color: $color-theme-dark
|
||||
|
||||
.u-hide-link.u-hide-link
|
||||
border: none
|
||||
color: inherit
|
||||
|
||||
&:hover
|
||||
color: inherit
|
||||
|
||||
.u-permalink
|
||||
position: relative
|
||||
|
||||
&:before
|
||||
content: "\00b6"
|
||||
font-size: 0.9em
|
||||
font-weight: normal
|
||||
color: $color-subtle
|
||||
@include position(absolute, top, left, 0.15em, -2.85rem)
|
||||
opacity: 0
|
||||
transition: opacity 0.2s ease
|
||||
|
||||
&:hover:before
|
||||
opacity: 1
|
||||
|
||||
&:active:before
|
||||
color: $color-theme
|
||||
|
||||
&:target
|
||||
display: inline-block
|
||||
|
||||
&:before
|
||||
bottom: 0.15em
|
||||
top: initial
|
||||
|
||||
|
||||
[id]:target
|
||||
padding-top: $nav-height * 1.25
|
||||
|
||||
& + *
|
||||
margin-top: $nav-height * 1.25
|
||||
|
||||
.u-permalink__icon
|
||||
@include position(absolute, bottom, left, 0.35em, -2.75rem)
|
||||
@include size(1.5rem)
|
||||
color: $color-subtle
|
||||
|
||||
.u-permalink:hover &
|
||||
color: $color-subtle-dark
|
||||
|
||||
.u-permalink:active &
|
||||
color: $color-theme
|
||||
|
||||
|
||||
//- Layout
|
||||
|
||||
.u-float-left
|
||||
float: left
|
||||
margin-right: 1rem
|
||||
|
||||
.u-float-right
|
||||
float: right
|
||||
margin-left: 1rem
|
||||
|
||||
.u-text-center
|
||||
text-align: center
|
||||
|
||||
|
@ -104,14 +164,20 @@
|
|||
padding: 0.5em 0.75em
|
||||
|
||||
.u-padding-medium
|
||||
padding: 2.5rem
|
||||
padding: 1.8rem
|
||||
|
||||
.u-inline-block
|
||||
display: inline-block
|
||||
|
||||
.u-flex-full
|
||||
flex: 1
|
||||
|
||||
.u-nowrap
|
||||
white-space: nowrap
|
||||
|
||||
.u-wrap
|
||||
white-space: pre-wrap
|
||||
|
||||
.u-break.u-break
|
||||
word-wrap: break-word
|
||||
white-space: initial
|
||||
|
@ -123,13 +189,10 @@
|
|||
border: 1px solid $color-subtle
|
||||
border-radius: 2px
|
||||
|
||||
.u-border-bottom
|
||||
border: 1px solid $color-subtle
|
||||
|
||||
.u-border-dotted
|
||||
border-top: 1px dotted $color-subtle
|
||||
border-bottom: 1px dotted $color-subtle
|
||||
|
||||
@each $name, $color in (theme: $color-theme, subtle: $color-subtle-dark, light: $color-back, red: $color-red, green: $color-green, yellow: $color-yellow)
|
||||
@each $name, $color in (theme: $color-theme, dark: $color-dark, subtle: $color-subtle-dark, light: $color-back, red: $color-red, green: $color-green, yellow: $color-yellow)
|
||||
.u-color-#{$name}
|
||||
color: $color
|
||||
|
||||
|
@ -145,6 +208,32 @@
|
|||
background: $pattern
|
||||
|
||||
|
||||
//- Loaders
|
||||
|
||||
.u-loading,
|
||||
[data-loading]
|
||||
$spinner-size: 75px
|
||||
$spinner-bar: 8px
|
||||
|
||||
position: relative
|
||||
|
||||
& > *
|
||||
opacity: 0.35
|
||||
|
||||
&:before
|
||||
@include position(absolute, top, left, 0, 0)
|
||||
@include size($spinner-size)
|
||||
right: 0
|
||||
bottom: 0
|
||||
margin: auto
|
||||
content: ""
|
||||
border: $spinner-bar solid $color-subtle
|
||||
border-right: $spinner-bar solid $color-theme
|
||||
border-radius: 50%
|
||||
animation: rotate 1s linear infinite
|
||||
z-index: 10
|
||||
|
||||
|
||||
//- Hidden elements
|
||||
|
||||
.u-hidden
|
||||
|
|
|
@ -10,6 +10,8 @@
|
|||
|
||||
.c-aside__content
|
||||
background: $color-front
|
||||
border-top-left-radius: $border-radius
|
||||
border-bottom-left-radius: $border-radius
|
||||
z-index: 10
|
||||
|
||||
@include breakpoint(min, md)
|
||||
|
@ -21,12 +23,12 @@
|
|||
&:after
|
||||
$triangle-size: 2rem
|
||||
|
||||
@include position(absolute, bottom, left, -$triangle-size / 2, 0)
|
||||
@include position(absolute, bottom, left, -$triangle-size / 2, $border-radius / 2)
|
||||
@include size(0)
|
||||
border-color: transparent
|
||||
border-style: solid
|
||||
border-top-color: $color-dark
|
||||
border-width: $triangle-size / 2 0 0 $triangle-size
|
||||
border-width: $triangle-size / 2 0 0 calc(#{$triangle-size} - #{$border-radius / 2})
|
||||
content: ""
|
||||
|
||||
@include breakpoint(max, sm)
|
||||
|
|
|
@ -3,23 +3,50 @@
|
|||
.c-button
|
||||
display: inline-block
|
||||
font-weight: bold
|
||||
padding: 0.75em 1em
|
||||
padding: 0.8em 1.1em 1em
|
||||
margin-bottom: 1px
|
||||
border: 2px solid
|
||||
border-radius: 2px
|
||||
border: 2px solid $color-theme
|
||||
border-radius: 2em
|
||||
text-align: center
|
||||
transition: background 0.25s ease
|
||||
transition: background-color, color 0.25s ease
|
||||
|
||||
&:hover
|
||||
border-color: $color-theme-dark
|
||||
|
||||
&.c-button--small
|
||||
font-size: 1.1rem
|
||||
padding: 0.65rem 1.1rem 0.825rem
|
||||
|
||||
&.c-button--primary
|
||||
background: $color-theme
|
||||
color: $color-back
|
||||
border-color: $color-theme
|
||||
|
||||
&:hover
|
||||
background: $color-theme-dark
|
||||
border-color: $color-theme-dark
|
||||
|
||||
&.c-button--secondary
|
||||
background: $color-back
|
||||
color: $color-theme
|
||||
border-color: $color-theme
|
||||
|
||||
&:hover
|
||||
color: $color-theme-dark
|
||||
|
||||
&.c-button--secondary-light
|
||||
background: transparent
|
||||
color: $color-back
|
||||
border-color: $color-back
|
||||
|
||||
.c-icon-button
|
||||
@include size(35px)
|
||||
background: $color-subtle-light
|
||||
color: $color-subtle-dark
|
||||
border-radius: 50%
|
||||
padding: 0.5rem
|
||||
transition: color 0.2s ease
|
||||
|
||||
&:hover
|
||||
color: $color-theme
|
||||
|
||||
&.c-icon-button--right
|
||||
float: right
|
||||
margin-left: 3rem
|
||||
|
|
|
@ -24,9 +24,9 @@
|
|||
transform: translateX(110%)
|
||||
|
||||
&:before
|
||||
@include position(absolute, top, left, 1rem, 2rem)
|
||||
@include position(absolute, top, left, 1.25rem, 2rem)
|
||||
content: attr(data-title)
|
||||
font: bold 1.4rem $font-code
|
||||
font: bold 1.4rem $font-secondary
|
||||
text-transform: uppercase
|
||||
color: $color-back
|
||||
|
||||
|
@ -88,13 +88,18 @@
|
|||
background-image: url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNCIgaGVpZ2h0PSIyNCIgdmlld0JveD0iMCAwIDI0IDI0Ij48cGF0aCBmaWxsPSIjZmZmIiBkPSJNMTguOTg0IDYuNDIybC01LjU3OCA1LjU3OCA1LjU3OCA1LjU3OC0xLjQwNiAxLjQwNi01LjU3OC01LjU3OC01LjU3OCA1LjU3OC0xLjQwNi0xLjQwNiA1LjU3OC01LjU3OC01LjU3OC01LjU3OCAxLjQwNi0xLjQwNiA1LjU3OCA1LjU3OCA1LjU3OC01LjU3OHoiPjwvcGF0aD48L3N2Zz4=)
|
||||
|
||||
.c-chat__button
|
||||
@include position(fixed, bottom, right, 0, 2rem)
|
||||
padding: 1rem 1.5rem
|
||||
background: $color-front
|
||||
@include position(fixed, bottom, right, 1.5rem, 1.5rem)
|
||||
z-index: 5
|
||||
color: $color-back
|
||||
border-top-left-radius: 4px
|
||||
border-top-right-radius: 4px
|
||||
z-index: 20
|
||||
border-color: $color-theme
|
||||
border-style: solid
|
||||
border-width: 1px 1px 0 1px
|
||||
background: $color-front
|
||||
border-radius: 1em
|
||||
padding: 0.5rem 1.15rem 0.35rem
|
||||
opacity: 0.7
|
||||
transition: opacity 0.2s ease
|
||||
|
||||
&:hover
|
||||
opacity: 1
|
||||
|
||||
|
||||
.gitter-open-chat-button
|
||||
display: none
|
||||
|
|
|
@ -4,9 +4,9 @@
|
|||
|
||||
.c-code-block
|
||||
background: $color-front
|
||||
color: $color-back
|
||||
color: darken($color-back, 20)
|
||||
padding: 0.75em 0
|
||||
border-radius: 2px
|
||||
border-radius: $border-radius
|
||||
overflow: auto
|
||||
width: 100%
|
||||
max-width: 100%
|
||||
|
@ -16,6 +16,8 @@
|
|||
&.c-code-block--has-icon
|
||||
padding: 0
|
||||
display: flex
|
||||
border-top-left-radius: 0
|
||||
border-bottom-left-radius: 0
|
||||
|
||||
.c-code-block__icon
|
||||
padding: 0 0 0 1rem
|
||||
|
@ -43,17 +45,26 @@
|
|||
opacity: 0.5
|
||||
|
||||
|
||||
//- Code
|
||||
|
||||
code
|
||||
-webkit-font-smoothing: subpixel-antialiased
|
||||
-moz-osx-font-smoothing: auto
|
||||
|
||||
|
||||
//- Inline code
|
||||
|
||||
*:not(a):not(.c-code-block) > code
|
||||
color: $color-dark
|
||||
|
||||
*:not(.c-code-block) > code
|
||||
font: normal 600 0.8em/#{1} $font-code
|
||||
background: darken($color-theme-light, 5)
|
||||
box-shadow: 1px 1px 0 rgba($color-front, 0.05)
|
||||
text-shadow: 1px 1px 0 rgba($color-back, 0.5)
|
||||
color: $color-front
|
||||
padding: 0.1em 0.5em
|
||||
font-size: 90%
|
||||
background-color: $color-subtle-light
|
||||
padding: 0.2rem 0.4rem
|
||||
border-radius: 0.25rem
|
||||
font-family: $font-code
|
||||
white-space: nowrap
|
||||
margin: 0
|
||||
border-radius: 1px
|
||||
box-decoration-break: clone
|
||||
white-space: nowrap
|
||||
|
||||
|
|
|
@ -2,12 +2,11 @@
|
|||
|
||||
.c-landing
|
||||
background: $color-theme
|
||||
padding-top: 5rem
|
||||
padding-top: $nav-height * 1.5
|
||||
width: 100%
|
||||
|
||||
.c-landing__wrapper
|
||||
background: $pattern
|
||||
padding-bottom: 6rem
|
||||
width: 100%
|
||||
|
||||
.c-landing__content
|
||||
|
@ -15,9 +14,45 @@
|
|||
width: 100%
|
||||
min-height: 573px
|
||||
|
||||
.c-landing__headlines
|
||||
position: relative
|
||||
top: -1.5rem
|
||||
left: 1rem
|
||||
|
||||
.c-landing__title
|
||||
color: $color-back
|
||||
text-align: center
|
||||
margin-bottom: 0.75rem
|
||||
|
||||
.c-landing__blocks
|
||||
@include breakpoint(min, sm)
|
||||
position: relative
|
||||
top: -25rem
|
||||
margin-bottom: -25rem
|
||||
|
||||
.c-landing__card
|
||||
padding: 3rem 2.5rem
|
||||
|
||||
.c-landing__banner
|
||||
background: $color-theme
|
||||
|
||||
.c-landing__banner__content
|
||||
@include breakpoint(min, md)
|
||||
border: 4px solid
|
||||
padding: 1rem 6.5rem 2rem 4rem
|
||||
|
||||
|
||||
.c-landing__banner__text
|
||||
font-weight: 500
|
||||
|
||||
strong
|
||||
font-weight: 800
|
||||
|
||||
p
|
||||
font-size: 1.5rem
|
||||
|
||||
@include breakpoint(min, md)
|
||||
padding-top: 7rem
|
||||
|
||||
.c-landing__badge
|
||||
transform: rotate(7deg)
|
||||
|
|
|
@ -9,6 +9,8 @@
|
|||
|
||||
.c-list__item:before
|
||||
content: counter(li, #{$counter}) '.'
|
||||
font-size: 1em
|
||||
padding-right: 1rem
|
||||
|
||||
|
||||
//- List Item
|
||||
|
@ -21,13 +23,14 @@
|
|||
&:before
|
||||
content: '\25CF'
|
||||
display: inline-block
|
||||
font-size: 1em
|
||||
font-size: 0.6em
|
||||
font-weight: bold
|
||||
padding-right: 1.25rem
|
||||
margin-left: -3.75rem
|
||||
text-align: right
|
||||
width: 2.5rem
|
||||
counter-increment: li
|
||||
box-sizing: content-box
|
||||
|
||||
|
||||
//- List icon
|
||||
|
|
|
@ -3,9 +3,8 @@
|
|||
.x-terminal
|
||||
background: $color-subtle-light
|
||||
color: $color-front
|
||||
padding: 4px
|
||||
border: 1px dotted $color-subtle
|
||||
border-radius: 5px
|
||||
padding: $border-radius
|
||||
border-radius: 1em
|
||||
width: 100%
|
||||
|
||||
.x-terminal__icons
|
||||
|
|
|
@ -1,22 +1,21 @@
|
|||
//- 💫 CSS > COMPONENTS > NAVIGATION
|
||||
|
||||
.c-nav
|
||||
@include position(absolute, top, left, 0, 0)
|
||||
@include position(fixed, top, left, 0, 0)
|
||||
@include size(100%, $nav-height)
|
||||
background: $color-back
|
||||
color: $color-theme
|
||||
align-items: center
|
||||
display: flex
|
||||
justify-content: space-between
|
||||
flex-flow: row wrap
|
||||
padding: 0 2rem 0 1rem
|
||||
z-index: 20
|
||||
z-index: 30
|
||||
width: 100%
|
||||
border-bottom: 1px solid $color-subtle
|
||||
box-shadow: $box-shadow
|
||||
|
||||
&.c-nav--theme
|
||||
background: $color-theme
|
||||
color: $color-back
|
||||
border-bottom: none
|
||||
//@include breakpoint(min, md)
|
||||
// position: fixed
|
||||
|
||||
&.is-fixed
|
||||
animation: slideInDown 0.5s ease-in-out
|
||||
|
@ -28,12 +27,21 @@
|
|||
justify-content: flex-end
|
||||
flex-flow: row nowrap
|
||||
border-color: inherit
|
||||
flex: 1
|
||||
|
||||
.c-nav__menu__item
|
||||
display: flex
|
||||
align-items: center
|
||||
height: 100%
|
||||
text-transform: uppercase
|
||||
font-family: $font-secondary
|
||||
font-size: 1.6rem
|
||||
font-weight: bold
|
||||
color: $color-theme
|
||||
|
||||
&:not(:last-child)
|
||||
margin-right: 1em
|
||||
&:not(:first-child)
|
||||
margin-left: 2em
|
||||
|
||||
&.is-active
|
||||
color: $color-dark
|
||||
pointer-events: none
|
||||
|
|
24
website/assets/css/_components/_progress.sass
Normal file
24
website/assets/css/_components/_progress.sass
Normal file
|
@ -0,0 +1,24 @@
|
|||
//- 💫 CSS > COMPONENTS > PROGRESS
|
||||
|
||||
.c-progress
|
||||
display: block
|
||||
flex: 105%
|
||||
width: 105%
|
||||
height: 3px
|
||||
color: $color-theme
|
||||
background: transparent
|
||||
border: none
|
||||
position: absolute
|
||||
bottom: 0
|
||||
left: -2.5%
|
||||
|
||||
&::-webkit-progress-bar
|
||||
background: $color-back
|
||||
border-radius: none
|
||||
|
||||
&::-webkit-progress-value
|
||||
background: $color-theme
|
||||
border-radius: none
|
||||
|
||||
&::-moz-progress-bar
|
||||
background: $color-theme
|
|
@ -1,14 +1,17 @@
|
|||
//- 💫 CSS > COMPONENTS > QUICKSTART
|
||||
|
||||
.c-quickstart
|
||||
border: 1px solid $color-subtle
|
||||
border-radius: 2px
|
||||
border-radius: $border-radius
|
||||
display: none
|
||||
background: $color-subtle-light
|
||||
|
||||
&:not([style]) + .c-quickstart__info
|
||||
display: none
|
||||
|
||||
.c-code-block
|
||||
border-top-left-radius: 0
|
||||
border-top-right-radius: 0
|
||||
|
||||
.c-quickstart__content
|
||||
padding: 2rem 3rem
|
||||
|
||||
|
@ -72,7 +75,6 @@
|
|||
flex: 100%
|
||||
|
||||
.c-quickstart__legend
|
||||
color: $color-subtle-dark
|
||||
margin-right: 2rem
|
||||
padding-top: 0.75rem
|
||||
flex: 1 1 35%
|
||||
|
@ -95,4 +97,4 @@
|
|||
padding: 1.5rem 0
|
||||
|
||||
.c-quickstart__code
|
||||
font-size: 1.6rem
|
||||
font-size: 1.4rem
|
||||
|
|
|
@ -3,16 +3,15 @@
|
|||
//- Sidebar container
|
||||
|
||||
.c-sidebar
|
||||
background: $color-subtle-light
|
||||
overflow-y: auto
|
||||
|
||||
@include breakpoint(min, md)
|
||||
@include position(fixed, top, left, 0, 0)
|
||||
@include size($sidebar-width, 100vh)
|
||||
@include size($sidebar-width, calc(100vh - 3px))
|
||||
@include scroll-shadow($color-back, $color-front, $nav-height)
|
||||
flex: 0 0 $sidebar-width
|
||||
padding: calc(#{$nav-height} + 1.5rem) 0 0
|
||||
z-index: 10
|
||||
border-right: 1px solid $color-subtle
|
||||
|
||||
@include breakpoint(max, sm)
|
||||
flex: 100%
|
||||
|
@ -27,7 +26,7 @@
|
|||
|
||||
.c-sidebar__section
|
||||
& > *
|
||||
padding: 0 2rem
|
||||
padding: 0 2rem 0.35rem
|
||||
|
||||
@include breakpoint(max, sm)
|
||||
flex: 1 1 0
|
||||
|
@ -38,7 +37,59 @@
|
|||
&:not(:last-child)
|
||||
border-right: 1px solid $color-subtle
|
||||
|
||||
.is-active
|
||||
font-weight: bold
|
||||
.c-sidebar__item
|
||||
color: $color-theme
|
||||
background: rgba($color-subtle, 0.4)
|
||||
|
||||
&:hover
|
||||
color: $color-theme-dark
|
||||
|
||||
& > .is-active
|
||||
font-weight: bold
|
||||
color: $color-dark
|
||||
margin-top: 1rem
|
||||
|
||||
|
||||
//- Sidebar subsections
|
||||
|
||||
$crumb-bullet: 14px
|
||||
$crumb-bar: 2px
|
||||
|
||||
.c-sidebar__crumb
|
||||
display: block
|
||||
padding-top: 1rem
|
||||
padding-left: 1rem
|
||||
position: relative
|
||||
|
||||
.c-sidebar__crumb__item
|
||||
margin-bottom: $crumb-bullet / 2
|
||||
position: relative
|
||||
padding-left: 2rem
|
||||
color: $color-theme
|
||||
font-size: 1.2rem
|
||||
|
||||
&:hover
|
||||
color: $color-theme-dark
|
||||
|
||||
&:after
|
||||
@include size($crumb-bullet)
|
||||
@include position(absolute, top, left, $crumb-bullet / 4, 0)
|
||||
content: ""
|
||||
border-radius: 50%
|
||||
background: $color-theme
|
||||
z-index: 10
|
||||
|
||||
&:not(:last-child):before
|
||||
@include size($crumb-bar, 100%)
|
||||
@include position(absolute, top, left, $crumb-bullet, ($crumb-bullet - $crumb-bar) / 2)
|
||||
content: ""
|
||||
background: $color-subtle
|
||||
|
||||
&:first-child:before
|
||||
height: calc(100% + #{$crumb-bullet * 2})
|
||||
top: -$crumb-bullet / 2
|
||||
|
||||
&.is-active
|
||||
color: $color-dark
|
||||
|
||||
&:after
|
||||
background: $color-dark
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
//- Table row
|
||||
|
||||
.c-table__row
|
||||
&:nth-child(odd)
|
||||
&:nth-child(odd):not(.c-table__row--head)
|
||||
background: rgba($color-subtle-light, 0.35)
|
||||
|
||||
&.c-table__row--foot
|
||||
|
@ -38,7 +38,6 @@
|
|||
.c-table__head-cell
|
||||
font-weight: bold
|
||||
color: $color-theme
|
||||
background: $color-back
|
||||
padding: 1rem 0.5rem
|
||||
border-bottom: 2px solid $color-theme
|
||||
|
||||
|
|
|
@ -4,24 +4,34 @@
|
|||
position: relative
|
||||
|
||||
@include breakpoint(min, sm)
|
||||
&[data-tooltip-style="code"]:before
|
||||
-webkit-font-smoothing: subpixel-antialiased
|
||||
-moz-osx-font-smoothing: auto
|
||||
padding: 0.35em 0.85em 0.45em
|
||||
font: normal 1rem/#{1.25} $font-code
|
||||
white-space: nowrap
|
||||
min-width: auto
|
||||
|
||||
&:before
|
||||
@include position(absolute, top, left, 125%, 50%)
|
||||
display: inline-block
|
||||
content: attr(data-tooltip)
|
||||
background: $color-front
|
||||
border-radius: 2px
|
||||
border-radius: $border-radius
|
||||
border: 1px solid rgba($color-subtle-dark, 0.5)
|
||||
color: $color-back
|
||||
font: normal 1.3rem/#{1.25} $font-primary
|
||||
font: normal 1.2rem/#{1.25} $font-primary
|
||||
text-transform: none
|
||||
text-align: left
|
||||
opacity: 0
|
||||
padding: 0.5em 0.75em
|
||||
transform: translateX(-50%) translateY(-2px)
|
||||
transition: opacity 0.1s ease-out, transform 0.1s ease-out
|
||||
visibility: hidden
|
||||
min-width: 200px
|
||||
max-width: 300px
|
||||
min-width: 200px
|
||||
padding: 0.75em 1em 1em
|
||||
z-index: 200
|
||||
white-space: pre-wrap
|
||||
|
||||
&:hover:before
|
||||
opacity: 1
|
||||
|
|
|
@ -42,8 +42,8 @@
|
|||
// $scroll-shadow-side - side to cover shadow (left or right)
|
||||
// $scroll-shadow-background - original background color to match
|
||||
|
||||
@mixin scroll-shadow-base($scroll-shadow-color)
|
||||
background: radial-gradient(left, ellipse, rgba(0,0,0, .2) 0%, rgba(0,0,0, 0) 75%) 0 center, radial-gradient(right, ellipse, rgba(0,0,0, .2) 0%, rgba(0,0,0, 0) 75%) 100% center
|
||||
@mixin scroll-shadow-base($scroll-shadow-color, $scroll-shadow-intensity: 0.2)
|
||||
background: radial-gradient(ellipse at 0 50%, rgba($scroll-shadow-color, $scroll-shadow-intensity) 0%, rgba(0,0,0,0) 75%) 0 center, radial-gradient(ellipse at 100% 50%, rgba($scroll-shadow-color, $scroll-shadow-intensity) 0%, transparent 75%) 100% center
|
||||
background-attachment: scroll, scroll
|
||||
background-repeat: no-repeat
|
||||
background-size: 10px 100%, 10px 100%
|
||||
|
@ -58,3 +58,16 @@
|
|||
background-image: linear-gradient(to #{$scroll-gradient-direction}, rgba($scroll-shadow-background, 1) 50%, rgba($scroll-shadow-background, 0) 100%)
|
||||
background-repeat: no-repeat
|
||||
background-size: 20px 100%
|
||||
|
||||
|
||||
// Full vertical scroll shadows
|
||||
// adapted from: https://codepen.io/laustdeleuran/pen/DBaAu
|
||||
|
||||
@mixin scroll-shadow($background-color, $shadow-color, $shadow-offset: 0, $shadow-intensity: 0.4, $cover-size: 40px, $shadow-size: 15px)
|
||||
background: linear-gradient($background-color 30%, rgba($background-color,0)) 0 $shadow-offset, linear-gradient(rgba($background-color,0), $background-color 70%) 0 100%, radial-gradient(50% 0, farthest-side, rgba($shadow-color,$shadow-intensity), rgba($shadow-color,0)) 0 $shadow-offset, radial-gradient(50% 100%,farthest-side, rgba($shadow-color,$shadow-intensity), rgba($shadow-color,0)) 0 100%
|
||||
|
||||
background: linear-gradient($background-color 30%, rgba($background-color,0)) 0 $shadow-offset, linear-gradient(rgba($background-color,0), $background-color 70%) 0 100%, radial-gradient(farthest-side at 50% 0, rgba($shadow-color,$shadow-intensity), rgba($shadow-color,0)) -20px $shadow-offset, radial-gradient(farthest-side at 50% 100%, rgba($shadow-color, $shadow-intensity), rgba($shadow-color,0)) 0 100%
|
||||
background-repeat: no-repeat
|
||||
background-color: $background-color
|
||||
background-size: 100% $cover-size, 100% $cover-size, 100% $shadow-size, 100% $shadow-size
|
||||
background-attachment: local, local, scroll, scroll
|
||||
|
|
|
@ -4,47 +4,48 @@
|
|||
|
||||
$type-base: 11px
|
||||
|
||||
$nav-height: 45px
|
||||
$nav-height: 55px
|
||||
$content-width: 1250px
|
||||
$sidebar-width: 200px
|
||||
$aside-width: 30vw
|
||||
$sidebar-width: 235px
|
||||
$aside-width: 27.5vw
|
||||
$aside-padding: 25px
|
||||
$border-radius: 6px
|
||||
|
||||
$logo-width: 85px
|
||||
$logo-height: 27px
|
||||
|
||||
$grid: ( quarter: 4, third: 3, half: 2, two-thirds: 1.5, three-quarters: 1.33 )
|
||||
$breakpoints: ( sm: 768px, md: 992px, lg: 1200px )
|
||||
$headings: (1: 3, 2: 2.6, 3: 2, 4: 1.8, 5: 1.5)
|
||||
|
||||
$headings: (1: 4.4, 2: 3.4, 3: 2.6, 4: 2.2, 5: 1.8)
|
||||
|
||||
// Fonts
|
||||
|
||||
$font-primary: "Source Sans Pro", Tahoma, Geneva, sans-serif !default
|
||||
$font-code: 'Source Code Pro', Consolas, 'Andale Mono', Menlo, Monaco, Courier, monospace !default
|
||||
|
||||
$font-primary: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol" !default
|
||||
$font-secondary: "HK Grotesk", Roboto, Helvetica, Arial, sans-serif !default
|
||||
$font-code: Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace !default
|
||||
|
||||
// Colors
|
||||
|
||||
$colors: ( blue: #09a3d5, red: #d9515d, green: #08c35e )
|
||||
$colors: ( blue: #09a3d5, green: #05b083 )
|
||||
|
||||
$color-back: #fff !default
|
||||
$color-front: #1a1e23 !default
|
||||
$color-dark: lighten($color-front, 20) !default
|
||||
|
||||
$color-theme: map-get($colors, $theme)
|
||||
$color-theme-dark: darken(map-get($colors, $theme), 5)
|
||||
$color-theme-dark: darken(map-get($colors, $theme), 10)
|
||||
$color-theme-light: rgba($color-theme, 0.05)
|
||||
|
||||
$color-subtle: #ddd !default
|
||||
$color-subtle-light: #f6f6f6 !default
|
||||
$color-subtle-dark: #949e9b !default
|
||||
|
||||
$color-red: #d9515d
|
||||
$color-green: #3ec930
|
||||
$color-red: #ef476f
|
||||
$color-green: #7ddf64
|
||||
$color-yellow: #f4c025
|
||||
|
||||
$syntax-highlighting: ( comment: #949e9b, tag: #b084eb, number: #b084eb, selector: #ffb86c, operator: #ff2c6d, function: #35b3dc, keyword: #ff2c6d, regex: #f4c025 )
|
||||
|
||||
$pattern: $color-theme url("/assets/img/pattern_#{$theme}.jpg") center top repeat
|
||||
$pattern-overlay: transparent url("/assets/img/pattern_landing.jpg") center -138px no-repeat
|
||||
$box-shadow: 0 1px 5px rgba(0, 0, 0, 0.2)
|
||||
|
|
|
@ -30,6 +30,7 @@ $theme: blue !default
|
|||
@import _components/lists
|
||||
@import _components/misc
|
||||
@import _components/navigation
|
||||
@import _components/progress
|
||||
@import _components/sidebar
|
||||
@import _components/tables
|
||||
@import _components/quickstart
|
||||
|
|
|
@ -1,4 +0,0 @@
|
|||
//- 💫 STYLESHEET (RED)
|
||||
|
||||
$theme: red
|
||||
@import style
|
BIN
website/assets/fonts/hkgrotesk-bold.woff
Executable file
BIN
website/assets/fonts/hkgrotesk-bold.woff
Executable file
Binary file not shown.
BIN
website/assets/fonts/hkgrotesk-bold.woff2
Executable file
BIN
website/assets/fonts/hkgrotesk-bold.woff2
Executable file
Binary file not shown.
BIN
website/assets/fonts/hkgrotesk-bolditalic.woff
Executable file
BIN
website/assets/fonts/hkgrotesk-bolditalic.woff
Executable file
Binary file not shown.
BIN
website/assets/fonts/hkgrotesk-bolditalic.woff2
Executable file
BIN
website/assets/fonts/hkgrotesk-bolditalic.woff2
Executable file
Binary file not shown.
BIN
website/assets/fonts/hkgrotesk-semibold.woff
Executable file
BIN
website/assets/fonts/hkgrotesk-semibold.woff
Executable file
Binary file not shown.
BIN
website/assets/fonts/hkgrotesk-semibold.woff2
Executable file
BIN
website/assets/fonts/hkgrotesk-semibold.woff2
Executable file
Binary file not shown.
BIN
website/assets/fonts/hkgrotesk-semibolditalic.woff
Executable file
BIN
website/assets/fonts/hkgrotesk-semibolditalic.woff
Executable file
Binary file not shown.
BIN
website/assets/fonts/hkgrotesk-semibolditalic.woff2
Executable file
BIN
website/assets/fonts/hkgrotesk-semibolditalic.woff2
Executable file
Binary file not shown.
Binary file not shown.
|
@ -1,244 +0,0 @@
|
|||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
|
||||
<svg xmlns="http://www.w3.org/2000/svg">
|
||||
<metadata></metadata>
|
||||
<defs>
|
||||
<font id="source_code_prosemibold" horiz-adv-x="1228" >
|
||||
<font-face units-per-em="2048" ascent="1536" descent="-512" />
|
||||
<missing-glyph horiz-adv-x="500" />
|
||||
<glyph horiz-adv-x="0" />
|
||||
<glyph horiz-adv-x="682" />
|
||||
<glyph horiz-adv-x="0" />
|
||||
<glyph horiz-adv-x="0" />
|
||||
<glyph unicode="
" />
|
||||
<glyph unicode=" " />
|
||||
<glyph unicode="!" d="M436 160q0 82 51.5 131t126.5 49q76 0 127.5 -49t51.5 -131q0 -84 -51.5 -134.5t-127.5 -50.5t-127 50.5t-51 134.5zM498 1372h233l-6 -233l-33 -639h-155l-33 639z" />
|
||||
<glyph unicode=""" d="M219 1405h287l-6 -262l-62 -473h-151l-62 473zM725 1405h287l-6 -262l-62 -473h-151l-62 473z" />
|
||||
<glyph unicode="#" d="M160 399v156h176l33 264h-168v156h186l47 356h139l-43 -356h242l45 356h141l-43 -356h162v-156h-180l-33 -264h172v-156h-190l-49 -399h-144l49 399h-241l-47 -399h-144l47 399h-157zM479 555h242l33 264h-242z" />
|
||||
<glyph unicode="$" d="M152 233l106 162q88 -59 178 -96t197 -37q98 0 148 37t50 98q0 53 -46 88t-115.5 65t-149.5 59.5t-149.5 72.5t-115.5 103.5t-46 152.5q0 133 90 217t244 105v268h164v-266q109 -12 185.5 -55.5t139.5 -106.5l-119 -137q-72 53 -136 81.5t-159 28.5q-86 0 -134 -34.5 t-48 -96.5q0 -47 46 -77.5t116 -58.5t150.5 -57.5t150.5 -72.5t116 -105.5t46 -156.5q0 -131 -93.5 -222.5t-260.5 -115.5v-301h-164v297q-106 10 -211 54t-180 107z" />
|
||||
<glyph unicode="%" d="M47 182l379 371l86 -80l-338 -412zM53 991q0 80 23 142.5t61.5 105.5t93 65.5t117.5 22.5t117.5 -22.5t93.5 -65.5t61.5 -105.5t22.5 -142.5t-22.5 -143t-61.5 -108t-93.5 -69t-117.5 -24t-117.5 24t-93 69t-61.5 108.5t-23 142.5zM221 991q0 -113 37 -163t90 -50 t90 50.5t37 162.5q0 115 -36.5 160t-90.5 45q-53 0 -90 -45t-37 -160zM587 319q0 80 23 142.5t61.5 105.5t93 65.5t117.5 22.5t117.5 -22.5t93.5 -65.5t61.5 -105.5t22.5 -142.5t-22.5 -143t-61.5 -108t-93.5 -69t-117.5 -24t-117.5 24t-93 69t-61.5 108.5t-23 142.5z M721 866l338 412l127 -121l-379 -371zM755 319q0 -113 37 -163t90 -50t90 50.5t37 162.5q0 115 -36.5 160t-90.5 45q-53 0 -90 -45t-37 -160z" />
|
||||
<glyph unicode="&" d="M66 352q0 68 19 122t51 99t74 83t89 71q-41 82 -64.5 159t-23.5 144q0 70 22.5 130.5t63.5 105.5t98.5 71.5t128.5 26.5q137 0 214 -78t77 -207q0 -66 -23.5 -119t-62.5 -99t-87 -87t-99 -80q59 -82 132 -159.5t152 -143.5q94 145 140 348h217q-35 -127 -84 -243.5 t-117 -221.5q57 -39 109.5 -65.5t99.5 -40.5l-59 -193q-135 37 -283 140q-74 -63 -165 -101.5t-204 -38.5q-96 0 -174 30t-131 80t-81.5 118.5t-28.5 148.5zM291 367q0 -94 62.5 -151.5t156.5 -57.5q49 0 96 20.5t90 57.5q-86 74 -162.5 158.5t-140.5 176.5 q-45 -45 -73.5 -95t-28.5 -109zM408 1032q0 -88 51 -194q72 51 122 107t50 132q0 53 -22.5 90t-82.5 37q-53 0 -85.5 -47t-32.5 -125z" />
|
||||
<glyph unicode="'" d="M471 1405h287l-6 -262l-62 -473h-151l-62 473z" />
|
||||
<glyph unicode="(" d="M397 569q0 293 120 526.5t327 405.5l131 -108q-195 -176 -285 -375t-90 -449t90 -448.5t285 -374.5l-131 -108q-207 172 -327 405t-120 526z" />
|
||||
<glyph unicode=")" d="M254 -254q195 176 285 374.5t90 448.5t-90 449t-285 375l131 108q207 -172 326.5 -405.5t119.5 -526.5t-119.5 -526t-326.5 -405z" />
|
||||
<glyph unicode="*" d="M152 754l38 120l338 -100l21 375h131l20 -375l338 100l39 -120l-323 -142l211 -329l-109 -76l-242 307l-241 -307l-109 76l211 329z" />
|
||||
<glyph unicode="+" d="M160 590v172h364v387h181v-387h364v-172h-364v-387h-181v387h-364z" />
|
||||
<glyph unicode="," d="M387 -299q260 104 262 315q-10 -2 -28 -2q-78 0 -135.5 46t-57.5 137q0 86 58.5 134t138.5 48q111 0 166 -82t55 -225q0 -188 -102.5 -318.5t-294.5 -193.5z" />
|
||||
<glyph unicode="-" d="M160 590v172h909v-172h-909z" />
|
||||
<glyph unicode="." d="M412 182q0 90 57 148.5t145 58.5t145.5 -58.5t57.5 -148.5t-57.5 -148.5t-145.5 -58.5t-145 58.5t-57 148.5z" />
|
||||
<glyph unicode="/" d="M176 -328l674 1782h203l-674 -1782h-203z" />
|
||||
<glyph unicode="0" d="M131 657q0 332 130 501t353 169t353.5 -169t130.5 -501q0 -330 -130.5 -506t-353.5 -176t-353 176.5t-130 505.5zM344 657q0 -133 20.5 -228t56.5 -154.5t85 -87t108 -27.5q57 0 107.5 27.5t86.5 87t56.5 154.5t20.5 228t-20.5 226.5t-56.5 151t-86.5 83t-107.5 25.5 q-59 0 -108 -25.5t-85 -83t-56.5 -150.5t-20.5 -227zM473 666q0 68 41 106.5t100 38.5t100.5 -39t41.5 -106q0 -68 -41.5 -107t-100.5 -39t-100 39t-41 107z" />
|
||||
<glyph unicode="1" d="M184 0v193h353v856h-273v147q111 18 189.5 44t146.5 63h172v-1110h320v-193h-908z" />
|
||||
<glyph unicode="2" d="M123 1126q94 96 198.5 148.5t247.5 52.5q100 0 182.5 -28.5t140.5 -81t90 -126t32 -163.5q0 -86 -42 -175t-114 -180.5t-171 -187.5t-212 -199q57 4 120.5 9.5t117.5 5.5h368v-201h-948v137q154 131 274.5 240.5t205.5 204t130 176.5t45 157q0 104 -63 165t-186 61 q-86 0 -157 -43t-130 -103z" />
<glyph unicode="3" d="M102 160l111 151q66 -59 153 -101t193 -42q121 0 200 52t79 145q0 51 -22.5 92t-73 69.5t-133.5 44t-203 15.5v172q104 0 177.5 15t120 43t68 67t21.5 84q0 82 -62.5 128t-167.5 46q-86 0 -158.5 -34t-138.5 -89l-123 145q90 74 196.5 119t233.5 45q100 0 184.5 -22.5 t145 -66.5t94 -108.5t33.5 -146.5q0 -111 -71.5 -184.5t-192.5 -114.5v-8q63 -16 119.5 -44t98.5 -69t66.5 -94t24.5 -117q0 -90 -40 -159.5t-107.5 -117.5t-157.5 -74t-190 -26q-168 0 -287 53.5t-191 131.5z" />
<glyph unicode="4" d="M76 336v160l598 809h272v-787h184v-182h-184v-336h-221v336h-649zM315 518h410v311q2 59 5 129t7 129h-10q-33 -51 -67.5 -102t-69.5 -104z" />
<glyph unicode="5" d="M104 156l109 151q66 -55 146.5 -97t193.5 -42q123 0 206 65.5t83 184.5q0 117 -77 180.5t-202 63.5q-72 0 -123 -17.5t-118 -56.5l-113 71l41 644h768v-199h-563l-29 -316q51 20 98 31.5t107 11.5q92 0 174 -24.5t142.5 -74.5t96 -127t35.5 -181q0 -106 -41 -190 t-110.5 -141.5t-158.5 -87.5t-187 -30q-172 0 -286 54.5t-192 126.5z" />
<glyph unicode="6" d="M139 612q0 190 46 325.5t124 221.5t177.5 127t207.5 41q129 0 226.5 -43t162.5 -104l-127 -144q-45 43 -109.5 71t-133.5 28q-70 0 -132.5 -25t-110.5 -82t-78 -150.5t-34 -228.5q76 74 166 116t176 42q88 0 162 -24.5t128 -75t85 -126t31 -178.5q0 -98 -37 -177 t-99.5 -134t-144.5 -86t-172 -31q-102 0 -195 38t-164 116t-113 199t-42 284zM367 483q23 -174 98 -250.5t186 -76.5q49 0 91 16t75 48t51.5 78t18.5 105q0 117 -62.5 174.5t-173.5 57.5q-66 0 -139.5 -34t-144.5 -118z" />
<glyph unicode="7" d="M135 1104v199h963v-144q-127 -141 -207 -269t-127 -261t-67.5 -286t-28.5 -343h-240q8 174 33.5 318.5t74 274.5t122 254t180.5 257h-703z" />
<glyph unicode="8" d="M131 334q0 63 21.5 114.5t58.5 91.5t85 71.5t99 56.5v8q-82 53 -138 127t-56 178q0 80 31.5 143.5t88 108.5t133 69.5t169.5 24.5q195 0 306 -96t111 -256q0 -92 -57 -168t-137 -129v-8q53 -27 99 -57.5t80 -70.5t53.5 -92.5t19.5 -119.5q0 -76 -32 -140.5t-93.5 -112.5 t-151.5 -75t-205 -27q-113 0 -202.5 27t-152 74t-96.5 112.5t-34 145.5zM340 358q0 -98 78 -156.5t203 -58.5q119 0 186 51.5t67 143.5q0 55 -27.5 94t-75.5 67.5t-113.5 52t-143.5 50.5q-76 -45 -125 -103.5t-49 -140.5zM410 981q0 -49 22.5 -86t63.5 -64.5t96 -51.5 t121 -44q129 104 129 226q0 86 -57.5 142t-166.5 56q-92 0 -150 -46t-58 -132z" />
<glyph unicode="9" d="M123 901q0 98 37 177t98 134.5t143 85t172 29.5q104 0 197.5 -38t164.5 -115.5t111.5 -197.5t40.5 -284q0 -190 -46 -325.5t-122.5 -222.5t-177 -128t-206.5 -41q-129 0 -227.5 43t-164.5 105l127 143q47 -43 110.5 -70.5t135.5 -27.5q68 0 130.5 24.5t110.5 82 t78.5 150.5t34.5 228q-78 -72 -167 -113.5t-175 -41.5q-88 0 -162.5 24.5t-128 74.5t-84 126t-30.5 178zM340 901q0 -117 62 -175t173 -58q68 0 142 34.5t145 118.5q-23 172 -98.5 249t-185.5 77q-49 0 -92.5 -16.5t-75 -47t-51 -76.5t-19.5 -106z" />
<glyph unicode=":" d="M412 182q0 90 57 148.5t145 58.5t145.5 -58.5t57.5 -148.5t-57.5 -148.5t-145.5 -58.5t-145 58.5t-57 148.5zM412 878q0 90 57 148.5t145 58.5t145.5 -58.5t57.5 -148.5t-57.5 -148.5t-145.5 -58.5t-145 58.5t-57 148.5z" />
<glyph unicode=";" d="M387 -299q260 104 262 315q-10 -2 -28 -2q-78 0 -135.5 46t-57.5 137q0 86 58.5 134t138.5 48q111 0 166 -82t55 -225q0 -188 -102.5 -318.5t-294.5 -193.5zM412 878q0 90 57 148.5t145 58.5t145.5 -58.5t57.5 -148.5t-57.5 -148.5t-145.5 -58.5t-145 58.5t-57 148.5z " />
<glyph unicode="<" d="M242 596v168l778 516v-219l-580 -377v-8l580 -377v-219z" />
<glyph unicode="=" d="M160 363v172h909v-172h-909zM160 819v172h909v-172h-909z" />
<glyph unicode=">" d="M209 80v219l579 377v8l-579 377v219l778 -516v-168z" />
<glyph unicode="?" d="M213 1221q72 78 168 127t219 49q86 0 160 -21.5t127 -63.5t82.5 -102.5t29.5 -140.5q0 -66 -25.5 -117t-64.5 -93t-83 -81t-78.5 -80t-56 -89t-13.5 -109h-207q-12 70 6.5 125t51 100t75.5 83t80 75t62.5 74.5t25.5 82.5q0 74 -50 119t-138 45q-72 0 -129.5 -28.5 t-108.5 -77.5zM408 160q0 82 51 131t127 49t127 -49t51 -131q0 -84 -51 -134.5t-127 -50.5t-127 50.5t-51 134.5z" />
<glyph unicode="@" d="M82 516q0 203 48 352.5t130 247.5t192.5 146.5t237.5 48.5q111 0 193 -38t135 -104.5t78.5 -157.5t25.5 -196v-610h-131l-18 110h-8q-41 -55 -109 -95t-149 -40q-55 0 -103.5 20.5t-83.5 57.5t-55.5 88t-20.5 113q0 160 138.5 239.5t388.5 110.5v31q0 66 -17.5 124 t-52.5 103t-90 70.5t-129 25.5q-82 0 -160 -39t-138 -118.5t-97 -203.5t-37 -294q0 -162 37 -285t98 -206t144 -125t176 -42q92 0 155.5 22t122.5 62l74 -120q-84 -53 -169 -83t-192 -30q-123 0 -233.5 51t-195.5 152.5t-135 255t-50 356.5zM623 463q0 -59 35.5 -92 t99.5 -33q49 0 96 27.5t94 74.5v224q-178 -27 -251.5 -73t-73.5 -128z" />
<glyph unicode="A" d="M41 0l434 1339h279l434 -1339h-252l-102 362h-447l-104 -362h-242zM440 549h340l-47 164q-29 106 -59.5 218.5t-59.5 223.5h-8q-29 -113 -58.5 -224.5t-60.5 -217.5z" />
<glyph unicode="B" d="M190 0v1339h396q102 0 189 -17t150.5 -55t99.5 -101.5t36 -155.5q0 -96 -57.5 -178t-176.5 -113v-8q147 -25 224 -104t77 -214q0 -100 -37.5 -174t-106.5 -123t-162 -72.5t-204 -23.5h-428zM428 184h166q147 0 225 55.5t78 170.5q0 204 -300 204h-3h-166v-430zM428 784 h135q141 0 202.5 51.5t61.5 145.5q0 92 -64.5 133t-193.5 41h-141v-371z" />
<glyph unicode="C" d="M123 666q0 164 47 294t130 220t196.5 137t246.5 47q123 0 219.5 -51t161.5 -117l-133 -147q-49 49 -109.5 78.5t-138.5 29.5q-84 0 -153.5 -33.5t-118.5 -96t-76.5 -153t-27.5 -202.5q0 -115 27.5 -206t77.5 -153.5t121 -96.5t157 -34q82 0 146.5 35t121.5 98l131 -145 q-82 -94 -184.5 -144.5t-229.5 -50.5q-129 0 -240.5 45.5t-194.5 133.5t-130 217t-47 295z" />
<glyph unicode="D" d="M154 0v1339h346q297 0 463.5 -165.5t166.5 -497.5q0 -166 -43 -293t-122.5 -212t-194.5 -128t-258 -43h-358zM391 193h94q193 0 297.5 117.5t104.5 365.5q0 246 -104.5 358.5t-297.5 112.5h-94v-954z" />
<glyph unicode="E" d="M213 0v1339h856v-200h-618v-342h524v-201h-524v-395h639v-201h-877z" />
<glyph unicode="F" d="M248 0v1339h852v-200h-615v-379h523v-201h-523v-559h-237z" />
<glyph unicode="G" d="M98 666q0 166 46 295t127 219t192.5 137t243.5 47q133 0 226 -52t152 -116l-131 -147q-47 47 -103 77.5t-144 30.5q-80 0 -146.5 -33.5t-115 -96t-75 -153t-26.5 -202.5q0 -229 93 -359.5t274 -130.5q53 0 102 15.5t78 44.5v288h-232v195h447v-590q-66 -66 -173.5 -113 t-236.5 -47q-127 0 -236.5 45.5t-189.5 133.5t-126 217t-46 295z" />
<glyph unicode="H" d="M147 0v1339h238v-538h459v538h237v-1339h-237v594h-459v-594h-238z" />
<glyph unicode="I" d="M172 0v201h324v938h-324v200h885v-200h-324v-938h324v-201h-885z" />
<glyph unicode="J" d="M143 197l144 145q57 -80 126.5 -121t141.5 -41q123 0 182.5 64.5t59.5 218.5v678h-543v198h780v-897q0 -96 -24.5 -181t-80 -148.5t-146.5 -100.5t-222 -37q-55 0 -113.5 12.5t-112.5 39t-103.5 68.5t-88.5 102z" />
<glyph unicode="K" d="M170 0v1339h240v-608h6l477 608h264l-412 -522l457 -817h-264l-334 631l-194 -240v-391h-240z" />
<glyph unicode="L" d="M246 0v1339h235v-1138h629v-201h-864z" />
<glyph unicode="M" d="M150 0v1339h245l162 -520l57 -203h7l55 203l158 520h245v-1339h-194v594q0 47 3 114.5t7 140.5t9 140.5t9 114.5h-6l-96 -328l-141 -416h-115l-143 416l-95 328h-6q4 -47 9.5 -114.5t10.5 -140.5t8 -140.5t3 -114.5v-594h-192z" />
<glyph unicode="N" d="M152 0v1339h241l359 -753l125 -285h6q-8 104 -20.5 224t-12.5 233v581h227v-1339h-241l-359 756l-125 282h-6q8 -106 20.5 -223t12.5 -229v-586h-227z" />
<glyph unicode="O" d="M88 676q0 162 39 290t107.5 216t166 135t213.5 47q117 0 214.5 -47t166 -135t107.5 -216t39 -290q0 -164 -39 -293t-107.5 -220t-166 -139.5t-214.5 -48.5t-214 48.5t-165.5 139.5t-107.5 220t-39 293zM332 676q0 -113 19.5 -204t56 -155.5t88.5 -99.5t118 -35 q63 0 115.5 35t89.5 99.5t57.5 155.5t20.5 204q0 225 -77 353t-206 128t-205.5 -128t-76.5 -353z" />
<glyph unicode="P" d="M178 0v1339h451q111 0 204 -20.5t161.5 -69.5t106.5 -128t38 -195q0 -111 -39 -193t-107.5 -136t-162 -79.5t-201.5 -25.5h-213v-492h-238zM416 682h194q295 0 295 244q0 127 -73.5 175t-221.5 48h-194v-467z" />
<glyph unicode="Q" d="M88 676q0 162 38 290t106.5 216t165 135t212.5 47q117 0 213.5 -47t165 -135t106.5 -216t38 -290q0 -276 -104.5 -453.5t-280.5 -228.5q35 -78 106.5 -114t159.5 -36q31 0 57.5 7.5t46.5 17.5l43 -182q-35 -16 -78 -25.5t-96 -9.5q-188 0 -310 94t-179 242 q-92 20 -168 77.5t-130.5 145.5t-83 206t-28.5 259zM330 676q0 -113 19.5 -205t55 -156.5t88 -99.5t117.5 -35q63 0 115.5 35t88.5 99.5t55.5 156.5t19.5 205q0 225 -75 354t-204 129q-131 0 -205.5 -129t-74.5 -354z" />
<glyph unicode="R" d="M172 0v1339h446q104 0 194.5 -20.5t157 -66.5t104.5 -121.5t38 -188.5q0 -154 -72.5 -248t-193.5 -135l319 -559h-268l-287 524h-200v-524h-238zM410 713h184q139 0 212 57t73 172q0 117 -73 162t-212 45h-184v-436z" />
<glyph unicode="S" d="M119 172l139 162q76 -68 171 -110t194 -42q123 0 186 47t63 125q0 41 -16 70t-46 50.5t-72 39.5t-91 37l-182 80q-53 20 -104.5 51t-92.5 74t-65.5 100t-24.5 131q0 80 35 148.5t96.5 120t146.5 80t187 28.5q123 0 234.5 -46t195.5 -124l-123 -152q-68 53 -141.5 84 t-165.5 31q-104 0 -164.5 -42t-60.5 -116q0 -39 18.5 -66.5t50 -49t73.5 -38t87 -34.5l176 -76q63 -25 116.5 -57.5t92.5 -75.5t60.5 -99.5t21.5 -129.5q0 -82 -33.5 -154t-97 -126t-156 -86t-209.5 -32q-141 0 -270 51.5t-229 145.5z" />
<glyph unicode="T" d="M74 1139v200h1081v-200h-422v-1139h-237v1139h-422z" />
<glyph unicode="U" d="M147 512v827h238v-845q0 -166 61.5 -239t169.5 -73q111 0 174.5 73t63.5 239v845h227v-827q0 -276 -120.5 -406.5t-344.5 -130.5q-219 0 -344 131.5t-125 405.5z" />
<glyph unicode="V" d="M57 1339h252l189 -673q33 -115 58.5 -217.5t59.5 -219.5h9q35 117 60.5 219.5t55.5 217.5l187 673h243l-415 -1339h-281z" />
<glyph unicode="W" d="M14 1339h240l74 -776q4 -86 10 -168t10 -168h6q16 86 38 169t40 167l117 434h151l115 -434q16 -82 37.5 -166t38.5 -170h8q4 86 10 170t11 166l69 776h225l-182 -1339h-252l-118 489q-14 63 -24.5 127t-19.5 123h-6q-10 -59 -20 -122.5t-25 -127.5l-112 -489h-248z" />
<glyph unicode="X" d="M74 0l393 690l-369 649h264l158 -297q25 -47 49.5 -96t55.5 -108h8q27 59 49.5 108t44.5 96l152 297h251l-368 -659l393 -680h-262l-174 315q-29 53 -55.5 105.5t-57.5 114.5h-8q-29 -61 -53.5 -113.5t-50.5 -106.5l-168 -315h-252z" />
<glyph unicode="Y" d="M55 1339h252l168 -360q35 -78 69 -152.5t68 -156.5h9q37 82 71.5 157.5t69.5 153.5l166 358h246l-441 -874v-465h-237v465z" />
<glyph unicode="Z" d="M125 0v143l686 998h-625v198h918v-143l-688 -995h696v-201h-987z" />
<glyph unicode="[" d="M436 -311v1761h586v-129h-404v-1503h404v-129h-586z" />
<glyph unicode="\" d="M176 1454h203l674 -1782h-203z" />
<glyph unicode="]" d="M209 -182h401v1503h-401v129h586v-1761h-586v129z" />
<glyph unicode="^" d="M201 571l315 801h197l315 -801h-199l-114 310l-97 276h-8l-96 -276l-115 -310h-198z" />
<glyph unicode="_" d="M123 -127h983v-184h-983v184z" />
<glyph unicode="`" d="M336 1497h246l184 -323h-176z" />
<glyph unicode="a" d="M145 270q0 84 39 148.5t123 109.5t215 74t318 41q-6 86 -61.5 141.5t-178.5 55.5q-86 0 -169 -32t-161 -75l-86 158q92 53 211 96t248 43q211 0 321.5 -111.5t110.5 -322.5v-596h-192l-19 125h-6q-80 -61 -177 -105.5t-200 -44.5q-74 0 -135 21.5t-106 60.5t-70 93 t-25 120zM373 289q0 -68 52 -98.5t128 -30.5t146.5 33.5t140.5 89.5v213q-135 -10 -225.5 -27.5t-143.5 -44.5t-75.5 -60.5t-22.5 -74.5z" />
<glyph unicode="b" d="M168 0v1446h238v-373l-9 -176q66 61 148 97t164 36q98 0 173.5 -36t129 -102.5t81 -160.5t27.5 -211q0 -129 -36.5 -230.5t-99 -171t-143.5 -106.5t-167 -37q-72 0 -150 35t-143 101h-6l-21 -111h-186zM406 262q55 -51 114.5 -71.5t106.5 -20.5q106 0 178 87t72 259 q0 152 -55.5 236t-178.5 84q-57 0 -116.5 -29t-120.5 -92v-453z" />
<glyph unicode="c" d="M150 502q0 125 46 223t123.5 165.5t182 103.5t219.5 36q123 0 216 -41t157 -100l-113 -148q-59 47 -119.5 72t-128.5 25q-152 0 -246 -91.5t-94 -244.5q0 -76 24.5 -137.5t68.5 -105.5t105.5 -67.5t135.5 -23.5q86 0 156.5 32.5t130.5 78.5l98 -152q-86 -76 -192.5 -114 t-214.5 -38q-119 0 -220.5 35t-176.5 102.5t-116.5 166t-41.5 223.5z" />
<glyph unicode="d" d="M109 502q0 123 36.5 221t99 165.5t142.5 104.5t166 37q90 0 154.5 -31.5t126.5 -87.5l-11 170v365h238v-1446h-195l-18 117h-6q-59 -59 -138 -100.5t-165 -41.5q-96 0 -176 36t-136.5 103.5t-87 165t-30.5 222.5zM352 504q0 -164 63.5 -249t180.5 -85q125 0 227 121v452 q-53 51 -106 72t-109 21q-53 0 -99 -22.5t-81 -64.5t-55.5 -103.5t-20.5 -141.5z" />
<glyph unicode="e" d="M127 502q0 123 44 221t115.5 166.5t165 104.5t191.5 36q113 0 200 -36t146.5 -98t90 -150.5t30.5 -192.5q0 -35 -3 -65.5t-7 -49.5h-731q12 -135 106 -207.5t232 -72.5q78 0 145.5 21.5t136.5 60.5l80 -148q-82 -51 -182.5 -84t-210.5 -33q-115 0 -215.5 36t-174 103.5 t-116.5 165t-43 222.5zM367 598h526q0 117 -61.5 183.5t-182.5 66.5q-102 0 -182 -63.5t-100 -186.5z" />
<glyph unicode="f" d="M188 819v176l279 11v55q0 88 24.5 163.5t77 130t133 85t191.5 30.5q82 0 156.5 -14t142.5 -41l-49 -174q-55 23 -107.5 33t-113.5 10q-117 0 -169.5 -56.5t-52.5 -162.5v-59h383v-187h-383v-819h-233v819h-279z" />
<glyph unicode="g" d="M127 -176q0 59 41 114.5t119 96.5v8q-41 23 -72 63.5t-31 104.5q0 51 33 101t88 89v9q-53 37 -91 102.5t-38 155.5q0 86 34 153.5t90 113.5t131 70.5t159 24.5q90 0 158 -24h411v-175h-235q27 -31 47 -74.5t20 -97.5q0 -84 -30.5 -147t-85 -106.5t-128 -65t-157.5 -21.5 q-76 0 -154 27q-61 -39 -61 -92q0 -96 190 -96h205q197 0 296 -57.5t99 -188.5q0 -74 -41 -139.5t-115.5 -113.5t-182 -75.5t-240.5 -27.5q-104 0 -188.5 16t-144.5 50t-93 84t-33 118zM324 -145q0 -68 74.5 -107t215.5 -39q74 0 133.5 13.5t102.5 36t65.5 52t22.5 62.5 q0 59 -51 78.5t-152 19.5h-160q-49 0 -86.5 3t-66.5 14q-53 -33 -75.5 -65.5t-22.5 -67.5zM399 668q0 -96 56.5 -148.5t134.5 -52.5t135 52t57 149q0 92 -57 146t-135 54t-134.5 -54t-56.5 -146z" />
<glyph unicode="h" d="M168 0v1446h238v-373l-15 -215q70 70 157 121t204 51q174 0 256 -105.5t82 -301.5v-623h-238v592q0 123 -42 179t-146 56q-74 0 -131.5 -34.5t-126.5 -106.5v-686h-238z" />
<glyph unicode="i" d="M172 819v187h672v-1006h-236v819h-436zM541 1339q0 72 48 116t120 44q74 0 121 -44t47 -116t-47 -114.5t-121 -42.5q-72 0 -120 43t-48 114z" />
<glyph unicode="j" d="M100 -354l72 170q57 -27 111.5 -39.5t103.5 -12.5q129 0 175 61.5t46 180.5v813h-436v187h672v-988q0 -92 -21.5 -171.5t-72 -140t-136.5 -94.5t-215 -34q-88 0 -162.5 19.5t-136.5 48.5zM541 1339q0 72 48 116t120 44q74 0 121 -44t47 -116t-47 -114.5t-121 -42.5 q-72 0 -120 43t-48 114z" />
<glyph unicode="k" d="M184 0v1446h238v-905h8l453 465h262l-389 -406l432 -600h-258l-316 455l-192 -193v-262h-238z" />
<glyph unicode="l" d="M145 1260v186h553v-1092q0 -102 51.5 -144t129.5 -42q74 0 172 39l55 -174q-72 -25 -134.5 -41.5t-150.5 -16.5q-178 0 -268 102.5t-90 289.5v893h-318z" />
<glyph unicode="m" d="M98 0v1006h178l19 -129h6q35 66 85 109.5t136 43.5q147 0 185 -168q35 72 89 120t138 48q106 0 164.5 -83t58.5 -234v-713h-223v694q0 133 -84 133q-41 0 -71.5 -30.5t-59.5 -94.5v-702h-184v694q0 133 -86 133q-41 0 -70 -30.5t-57 -94.5v-702h-224z" />
<glyph unicode="n" d="M168 0v1006h194l19 -152h8q72 72 159 124t204 52q174 0 256 -105.5t82 -301.5v-623h-238v592q0 123 -42 179t-146 56q-74 0 -131.5 -34.5t-126.5 -106.5v-686h-238z" />
<glyph unicode="o" d="M109 502q0 125 42 223t111.5 165.5t161.5 103.5t190 36t190.5 -36t162 -103.5t111.5 -165.5t42 -223t-42 -223.5t-111.5 -166t-161.5 -102.5t-191 -35q-98 0 -190 35t-161.5 102.5t-111.5 166t-42 223.5zM352 502q0 -152 70 -243t192 -91q123 0 193 91t70 243 q0 154 -70 245t-193 91t-192.5 -91t-69.5 -245z" />
<glyph unicode="p" d="M168 -397v1403h194l19 -115h6q66 59 151 99t171 40q98 0 175 -36t129 -103.5t79.5 -161.5t27.5 -211q0 -129 -36.5 -230.5t-99 -170t-143.5 -105.5t-167 -37q-70 0 -142.5 31t-134.5 88l9 -176v-315h-238zM406 262q55 -51 113.5 -71.5t105.5 -20.5q109 0 180.5 87 t71.5 259q0 152 -55.5 236t-178.5 84q-57 0 -116.5 -29t-120.5 -92v-453z" />
<glyph unicode="q" d="M109 502q0 123 36.5 221t99 165.5t142.5 104.5t166 37q88 0 158.5 -32.5t136.5 -98.5h6l20 107h187v-1403h-238v338l11 168q-59 -57 -136 -95.5t-159 -38.5q-96 0 -176 36t-136.5 103.5t-87 165t-30.5 222.5zM352 504q0 -164 63.5 -249t180.5 -85q125 0 227 121v452 q-53 51 -106 72t-109 21q-53 0 -99 -22.5t-81 -64.5t-55.5 -103.5t-20.5 -141.5z" />
<glyph unicode="r" d="M266 0v1006h195l20 -228h6q78 123 187.5 187.5t240.5 64.5q61 0 105.5 -9t89.5 -32l-49 -198q-49 16 -87 23t-95 7q-104 0 -200.5 -59.5t-174.5 -206.5v-555h-238z" />
<glyph unicode="s" d="M127 131l111 150q86 -59 179 -93t210 -34q119 0 174 33.5t55 86.5t-71.5 89t-225.5 77q-72 18 -139.5 45t-118.5 62t-82 80t-31 102q0 133 115 217t330 84q123 0 231.5 -41t186.5 -92l-109 -145q-68 45 -144.5 72.5t-162.5 27.5q-117 0 -164 -32t-47 -79q0 -27 21.5 -48 t59.5 -38.5t89 -34t113 -30.5q88 -23 158.5 -50.5t120.5 -62.5t78 -82t28 -108q0 -66 -31 -122t-90.5 -98t-147.5 -67t-202 -25q-141 0 -270.5 45.5t-223.5 110.5z" />
<glyph unicode="t" d="M121 819v176l278 11l31 274h195v-274h458v-187h-458v-413q0 -125 51 -183.5t180 -58.5q66 0 119 11t102 30l45 -172q-68 -23 -149.5 -40.5t-173.5 -17.5q-115 0 -193 31t-126 87.5t-69.5 136t-21.5 176.5v413h-268z" />
<glyph unicode="u" d="M143 383v623h236v-592q0 -123 42 -179.5t146 -56.5q72 0 128.5 33t123.5 117v678h236v-1006h-193l-18 160h-8q-72 -84 -158 -134.5t-199 -50.5q-176 0 -256 105.5t-80 302.5z" />
<glyph unicode="v" d="M84 1006h238l194 -523q29 -76 52.5 -150.5t47.5 -152.5h9q25 78 48 153t52 150l195 523h225l-391 -1006h-269z" />
<glyph unicode="w" d="M12 1006h234l92 -523q12 -72 21.5 -143.5t19.5 -144.5h8q10 74 22.5 145.5t28.5 142.5l94 424h177l94 -424q16 -72 29.5 -143.5t25.5 -144.5h8q12 74 21.5 145.5t19.5 142.5l92 523h218l-187 -1006h-276l-88 420q-12 72 -23.5 143.5t-24.5 149.5h-8q-10 -59 -19 -128 t-26 -167l-86 -418h-272z" />
<glyph unicode="x" d="M104 0l365 522l-340 484h256l131 -191q25 -41 53.5 -85t57.5 -87h8q23 43 49.5 88t48.5 86l119 189h248l-342 -508l366 -498h-256l-145 199q-29 41 -60.5 88t-62.5 90h-8q-29 -45 -54.5 -89t-54.5 -91l-131 -197h-248z" />
<glyph unicode="y" d="M82 1006h235l207 -494q29 -68 55.5 -143.5t55.5 -151.5h8q23 72 49.5 147.5t50.5 147.5l183 494h223l-420 -1051q-33 -86 -73 -153.5t-93 -115.5t-122.5 -74t-159.5 -26q-41 0 -78 5.5t-66 15.5l45 184q20 -6 41 -10t43 -4q96 0 153.5 48t88.5 124l24 63z" />
<glyph unicode="z" d="M145 0v127l605 692h-535v187h866v-125l-606 -693h625v-188h-955z" />
<glyph unicode="{" d="M231 498v143q86 0 142.5 14.5t89.5 36t45 48t12 55.5q0 96 -10 181t-10 191q0 82 22.5 136.5t69.5 87t118.5 46t170.5 13.5h141v-129h-94q-72 0 -117 -8t-70.5 -27.5t-35 -51.5t-9.5 -77q0 -86 5.5 -172t5.5 -180q0 -109 -43 -161t-152 -71v-8q109 -18 152 -70.5 t43 -160.5q0 -98 -5.5 -180t-5.5 -172q0 -47 9.5 -78t35 -50.5t70.5 -27.5t117 -8h94v-129h-141q-98 0 -170 13t-119 46t-69.5 87t-22.5 136q0 55 3 101.5t7 89.5t7 87t3 95q0 27 -12 54.5t-45 49t-89.5 36t-142.5 14.5z" />
<glyph unicode="|" d="M510 -512v2048h209v-2048h-209z" />
<glyph unicode="}" d="M209 -182h94q70 0 115 8t71.5 27.5t36 50t9.5 78.5q0 90 -5.5 172t-5.5 180q0 109 42 161t153 70v8q-111 18 -153 70.5t-42 161.5q0 94 5.5 180t5.5 172q0 45 -9.5 77t-36 51.5t-71.5 27.5t-115 8h-94v129h141q98 0 169 -13.5t118 -46t69.5 -87t22.5 -136.5 q0 -106 -10 -191t-10 -181q0 -29 12 -55.5t45 -48t89 -36t142 -14.5v-143q-86 0 -142 -14.5t-89 -36t-45 -49t-12 -54.5q0 -51 3 -95t7 -87t7 -89t3 -102q0 -82 -22.5 -136t-69.5 -87t-117.5 -46t-169.5 -13h-141v129z" />
<glyph unicode="~" d="M131 565q59 154 139 216.5t168 62.5q61 0 108.5 -25.5t88.5 -56.5t79 -56.5t81 -25.5q47 0 86 37t73 135l144 -68q-59 -152 -139 -214t-168 -62q-61 0 -108.5 25.5t-88.5 56.5t-79 56.5t-81 25.5q-47 0 -86 -38t-74 -134z" />
<glyph unicode=" " />
<glyph unicode="¡" d="M436 846q0 84 51.5 134t126.5 50q76 0 127.5 -50t51.5 -134q0 -82 -51.5 -131t-127.5 -49t-127 49t-51 131zM498 -367l6 234l33 639h155l33 -639l6 -234h-233z" />
<glyph unicode="¢" d="M213 631q0 106 33 193t89 150.5t134 103.5t170 55v206h137v-200q84 -4 148.5 -37t111.5 -80l-110 -145q-72 63 -150 71v-631q53 6 98.5 31t81.5 53l99 -149q-59 -53 -132 -85t-147 -40v-203h-137v203q-193 25 -309.5 154t-116.5 350zM444 631q0 -115 50.5 -193 t144.5 -106v600q-94 -31 -144.5 -110t-50.5 -191z" />
<glyph unicode="£" d="M150 0v145q117 49 181 138.5t64 201.5q0 23 -3 42.5t-7 41.5h-233v142l184 10q-20 53 -35.5 105.5t-15.5 103.5q0 92 32.5 165.5t92 125t142.5 79t183 27.5q125 0 215 -43t156 -117l-131 -129q-45 45 -97.5 71t-123.5 26q-117 0 -180.5 -56.5t-63.5 -160.5 q0 -51 13.5 -99.5t29.5 -97.5h367v-152h-326q8 -39 8 -86q0 -92 -32.5 -153.5t-98.5 -120.5v-8h637v-201h-958z" />
<glyph unicode="¤" d="M96 272l168 170q-35 47 -53 105.5t-18 126.5q0 70 18 128t51 105l-166 170l121 123l180 -184q98 63 217 63t217 -63l181 184l121 -123l-168 -170q35 -47 53 -105.5t18 -127.5q0 -68 -19.5 -126.5t-53.5 -105.5l170 -170l-121 -122l-183 184q-47 -33 -102 -48.5 t-113 -15.5q-121 0 -217 64l-180 -184zM406 674q0 -106 61 -171t147 -65t147.5 64.5t61.5 171.5q0 109 -61.5 173t-147.5 64t-147 -64t-61 -173z" />
<glyph unicode="¥" d="M86 1303h242l155 -310q35 -68 65 -135t64 -137h9q35 70 65.5 137.5t65.5 134.5l155 310h236l-357 -627h306v-121h-361v-121h361v-123h-361v-311h-235v311h-359v123h359v121h-359v121h303z" />
<glyph unicode="¦" d="M510 428h209v-940h-209v940zM510 618v918h209v-918h-209z" />
<glyph unicode="§" d="M166 684q0 84 49 152.5t127 111.5q-61 68 -61 168q0 61 22.5 114.5t65.5 92.5t105.5 61.5t143.5 22.5q111 0 200 -39t153 -86l-111 -149q-51 41 -108.5 68.5t-120.5 27.5q-72 0 -103.5 -29t-31.5 -74q0 -47 42 -78.5t105.5 -61.5t136 -61.5t136 -76.5t105.5 -109.5 t42 -156.5q0 -94 -47 -159t-131 -112q25 -33 39 -71.5t14 -89.5q0 -66 -24.5 -120.5t-70.5 -95t-113.5 -63.5t-151.5 -23q-115 0 -216.5 41t-168.5 119l139 123q53 -51 112.5 -78.5t133.5 -27.5q72 0 109.5 32.5t37.5 81.5t-41 82t-103.5 61.5t-135 59.5t-135 75 t-103.5 107.5t-41 159.5zM367 700q0 -57 37.5 -96t95 -69.5t127 -58t131.5 -62.5q53 27 78.5 61.5t25.5 87.5q0 57 -37 97t-94 71t-125.5 57.5t-132.5 61.5q-51 -27 -78.5 -63t-27.5 -87z" />
<glyph unicode="¨" d="M268 1323q0 59 38 99t97 40t98.5 -40t39.5 -99t-39.5 -98t-98.5 -39t-97 39t-38 98zM688 1323q0 59 39 99t98 40t97.5 -40t38.5 -99t-38.5 -98t-97.5 -39t-98 39t-39 98z" />
<glyph unicode="©" d="M49 664q0 158 44 283.5t121 212.5t180.5 133t219.5 46q117 0 220.5 -46t180.5 -133t121 -213t44 -283q0 -158 -44 -284t-121 -215t-180.5 -137t-220.5 -48t-220 48t-180 137t-121 215t-44 284zM164 664q0 -129 32.5 -235.5t91 -182.5t141.5 -118t185 -42q100 0 184.5 42 t142.5 118t91 182.5t33 235.5q0 131 -33 236.5t-91 180t-142 114.5t-185 40q-102 0 -185 -40t-141.5 -114.5t-91 -180t-32.5 -236.5zM291 662q0 90 28.5 162.5t75.5 122.5t110.5 78t131.5 28q78 0 130 -29t93 -70l-92 -102q-29 27 -55.5 41t-61.5 14q-86 0 -134 -69.5 t-48 -175.5q0 -119 47 -189t125 -70q47 0 79 17.5t66 42.5l78 -115q-47 -39 -102 -64.5t-127 -25.5q-74 0 -136.5 27.5t-108.5 80t-72.5 127.5t-26.5 169z" />
<glyph unicode="ª" d="M330 713q0 109 99 167t316 81q-4 51 -33.5 84.5t-97.5 33.5q-49 0 -99 -18.5t-97 -46.5l-64 114q59 35 135 60.5t158 25.5q131 0 201 -73.5t70 -216.5v-392h-142l-12 72h-4q-47 -37 -103.5 -62.5t-115.5 -25.5q-92 0 -151.5 55.5t-59.5 141.5zM496 727q0 -41 28.5 -59.5 t69.5 -18.5t79 17.5t72 48.5v139q-139 -16 -194 -48t-55 -79z" />
<glyph unicode="«" d="M129 399v234l332 321l104 -94l-260 -344l260 -346l-104 -92zM635 399v234l332 321l104 -94l-260 -344l260 -346l-104 -92z" />
<glyph unicode="¬" d="M160 590v172h909v-559h-178v387h-731z" />
<glyph unicode="­" d="M160 590v172h909v-172h-909z" />
<glyph unicode="®" d="M217 1053q0 90 31 165.5t85 130t125.5 84t153.5 29.5t155 -29.5t126 -84t85 -130t32 -165.5t-32 -165t-85 -129t-126 -84t-155 -30t-153.5 30t-125.5 84t-85 129t-31 165zM313 1053q0 -72 22.5 -130.5t62.5 -100.5t94 -65.5t120 -23.5q63 0 118.5 23.5t95.5 65.5 t62.5 100.5t22.5 130.5t-22.5 131t-62.5 102t-95 66.5t-119 23.5q-66 0 -120 -23.5t-94 -66.5t-62.5 -102.5t-22.5 -130.5zM457 854v408h168q68 0 115.5 -31t47.5 -103q0 -35 -19 -65.5t-54 -46.5l90 -162h-111l-65 133h-72v-133h-100zM557 1061h47q37 0 56.5 16.5 t19.5 44.5q0 27 -17.5 44.5t-54.5 17.5h-51v-123z" />
<glyph unicode="¯" d="M348 1221v155h533v-155h-533z" />
<glyph unicode="°" d="M338 1124q0 61 22.5 112.5t60.5 88.5t88 57.5t107 20.5t107.5 -20.5t88.5 -57.5t59.5 -88t21.5 -113q0 -61 -21.5 -112t-59.5 -88t-88.5 -57.5t-107.5 -20.5t-107 20.5t-88 57.5t-60.5 88t-22.5 112zM469 1124q0 -70 40.5 -113.5t106.5 -43.5t106 44t40 113q0 72 -40 116 t-106 44t-106.5 -44t-40.5 -116z" />
<glyph unicode="±" d="M160 0v170h909v-170h-909zM160 612v170h364v367h181v-367h364v-170h-364v-319h-181v319h-364z" />
<glyph unicode="²" d="M338 1429q53 55 116.5 91t141.5 36q117 0 184.5 -56t67.5 -159q0 -39 -16.5 -75.5t-44 -73.5t-65.5 -74t-79 -74h233v-143h-512v94q143 102 229.5 180t86.5 138q0 47 -27.5 74.5t-81.5 27.5q-72 0 -129 -78z" />
<glyph unicode="³" d="M344 975l76 108q33 -31 81 -53.5t97 -22.5q45 0 74.5 18.5t29.5 59.5q0 45 -40.5 66.5t-131.5 21.5v97q145 0 146 86q0 31 -24.5 50t-71.5 19q-35 0 -74 -17.5t-70 -41.5l-74 102q43 41 110 64.5t144 23.5q98 0 165 -47t67 -129q0 -57 -26.5 -92t-86.5 -57 q139 -35 139 -158q0 -45 -20 -81t-56 -61.5t-83 -40t-101 -14.5q-68 0 -138 22.5t-132 76.5z" />
<glyph unicode="´" d="M463 1174l184 323h246l-254 -323h-176z" />
<glyph unicode="µ" d="M143 -393v1399h236v-592q0 -123 44 -179.5t138 -56.5q70 0 131.5 38t126.5 153v637h238q-4 -94 -6 -195.5t-5.5 -202t-4.5 -193.5t-1 -169q0 -43 18.5 -60.5t47.5 -17.5h15t22 4l28 -176q-47 -20 -122 -21q-90 0 -137.5 51.5t-67.5 157.5h-6q-53 -100 -126 -150t-159 -50 q-57 0 -106.5 14t-84.5 63q0 -70 2.5 -127t4.5 -109t5 -105.5t7 -112.5h-238z" />
<glyph unicode="¶" d="M123 893q0 125 38 210t102.5 137t152.5 75.5t190 23.5h88v-905h-73q-104 0 -195.5 27.5t-158 85t-105.5 143.5t-39 203zM803 -164v1503h233v-1503h-233z" />
<glyph unicode="·" d="M412 717q0 90 57 148.5t145 58.5t145.5 -58.5t57.5 -148.5t-57.5 -148.5t-145.5 -58.5t-145 58.5t-57 148.5z" />
<glyph unicode="¸" d="M430 -342q78 10 127 31.5t49 62.5q0 33 -29.5 55.5t-95.5 38.5l82 160h143l-47 -100q51 -18 86 -52t35 -98q0 -49 -26.5 -83.5t-72.5 -59.5t-105.5 -39t-125.5 -18z" />
<glyph unicode="¹" d="M414 1354v108q37 6 64.5 12.5t51 14.5t45 18t43.5 25h138v-631h-176v453h-166z" />
<glyph unicode="º" d="M281 864q0 84 27.5 149.5t73.5 109.5t106.5 67.5t125.5 23.5q66 0 126.5 -23.5t106.5 -67.5t73.5 -109.5t27.5 -149.5q0 -82 -27.5 -146.5t-73.5 -109.5t-106.5 -68.5t-126.5 -23.5t-126 23.5t-106 68.5t-73.5 109.5t-27.5 146.5zM459 864q0 -94 40 -151.5t115 -57.5 q78 0 117 57.5t39 151.5q0 96 -39 154.5t-117 58.5q-76 0 -115.5 -58.5t-39.5 -154.5z" />
<glyph unicode="»" d="M160 170l260 346l-260 344l104 94l332 -321v-234l-332 -321zM666 170l260 346l-260 344l104 94l332 -321v-234l-332 -321z" />
<glyph unicode="¼" d="M45 182l379 371l86 -80l-338 -412zM150 1125v108q37 6 64.5 12.5t51 14.5t45 18t43.5 25h138v-631h-176v453h-166zM606 137v84l274 410h211v-383h105v-111h-105v-137h-155v137h-330zM719 866l338 412l127 -121l-379 -371zM770 248h166v80l10 186h-6l-80 -129z" />
<glyph unicode="½" d="M45 182l379 371l86 -80l-338 -412zM150 1125v108q37 6 64.5 12.5t51 14.5t45 18t43.5 25h138v-631h-176v453h-166zM606 528q53 55 116.5 91t141.5 36q117 0 184.5 -56t67.5 -159q0 -39 -16.5 -75.5t-44 -73.5t-65.5 -74t-79 -74h233v-143h-512v94q143 102 229.5 180 t86.5 138q0 47 -27.5 74.5t-81.5 27.5q-72 0 -129 -78zM719 866l338 412l127 -121l-379 -371z" />
<glyph unicode="¾" d="M78 182l379 371l86 -80l-338 -412zM78 746l76 108q33 -31 81 -53.5t97 -22.5q45 0 74.5 18.5t29.5 59.5q0 45 -40.5 66.5t-131.5 21.5v97q145 0 146 86q0 31 -24.5 50t-71.5 19q-35 0 -74 -17.5t-70 -41.5l-74 102q43 41 110 64.5t144 23.5q98 0 165 -47t67 -129 q0 -57 -26.5 -92t-86.5 -57q139 -35 139 -158q0 -45 -20 -81t-56 -61.5t-83 -40t-101 -14.5q-68 0 -138 22.5t-132 76.5zM608 137v84l274 410h211v-383h105v-111h-105v-137h-155v137h-330zM752 866l338 412l127 -121l-379 -371zM772 248h166v80l10 186h-6l-80 -129z" />
<glyph unicode="¿" d="M231 -63q0 68 26 117.5t64.5 92.5t83 81t79 79t56 88t13.5 111h207q12 -70 -6.5 -125t-51 -100t-75.5 -83t-80 -75t-62.5 -75t-25.5 -83q0 -74 50 -119t138 -45q72 0 129.5 30t108.5 77l133 -123q-72 -78 -169 -127t-218 -49q-86 0 -160 21.5t-127 63.5t-83 102.5 t-30 140.5zM467 846q0 84 51 134t127 50t127 -50t51 -134q0 -82 -51 -131t-127 -49t-127 49t-51 131z" />
<glyph unicode="À" d="M41 0l434 1339h279l434 -1339h-252l-102 362h-447l-104 -362h-242zM313 1679h258l172 -237h-190zM440 549h340l-47 164q-29 106 -59.5 218.5t-59.5 223.5h-8q-29 -113 -58.5 -224.5t-60.5 -217.5z" />
<glyph unicode="Á" d="M41 0l434 1339h279l434 -1339h-252l-102 362h-447l-104 -362h-242zM440 549h340l-47 164q-29 106 -59.5 218.5t-59.5 223.5h-8q-29 -113 -58.5 -224.5t-60.5 -217.5zM485 1442l172 237h258l-239 -237h-191z" />
<glyph unicode="Â" d="M41 0l434 1339h279l434 -1339h-252l-102 362h-447l-104 -362h-242zM303 1442l197 237h229l197 -237h-185l-123 131h-8l-123 -131h-184zM440 549h340l-47 164q-29 106 -59.5 218.5t-59.5 223.5h-8q-29 -113 -58.5 -224.5t-60.5 -217.5z" />
<glyph unicode="Ã" d="M41 0l434 1339h279l434 -1339h-252l-102 362h-447l-104 -362h-242zM291 1448q12 119 65.5 184.5t130.5 65.5q41 0 75 -15.5t63.5 -34t55.5 -34t52 -15.5q29 0 49.5 24t30.5 75h125q-12 -117 -65.5 -183.5t-131.5 -66.5q-41 0 -74.5 15.5t-63.5 35t-55.5 34.5t-51.5 15 q-29 0 -49.5 -24.5t-30.5 -75.5h-125zM440 549h340l-47 164q-29 106 -59.5 218.5t-59.5 223.5h-8q-29 -113 -58.5 -224.5t-60.5 -217.5z" />
<glyph unicode="Ä" d="M41 0l434 1339h279l434 -1339h-252l-102 362h-447l-104 -362h-242zM303 1571q0 53 34 88t87 35t88 -35t35 -88t-35 -87t-88 -34t-87 34t-34 87zM440 549h340l-47 164q-29 106 -59.5 218.5t-59.5 223.5h-8q-29 -113 -58.5 -224.5t-60.5 -217.5zM682 1571q0 53 35 88t88 35 t87 -35t34 -88t-34 -87t-87 -34t-88 34t-35 87z" />
<glyph unicode="Å" d="M41 0l434 1339h279l434 -1339h-252l-102 362h-447l-104 -362h-242zM403 1620q0 86 58.5 138t152.5 52q92 0 151.5 -52t59.5 -138q0 -88 -59.5 -139t-151.5 -51q-94 0 -152.5 51t-58.5 139zM440 549h340l-47 164q-29 106 -59.5 218.5t-59.5 223.5h-8 q-29 -113 -58.5 -224.5t-60.5 -217.5zM524 1620q0 -47 25.5 -71.5t64.5 -24.5q37 0 64 24.5t27 71.5q0 45 -27 70.5t-64 25.5q-39 0 -64.5 -25.5t-25.5 -70.5z" />
<glyph unicode="Æ" d="M-4 0l479 1339h721v-200h-340v-340h270v-201h-270v-397h358v-201h-571v348h-283l-118 -348h-246zM424 535h219v622h-6q-35 -104 -70 -206.5t-69 -200.5z" />
<glyph unicode="Ç" d="M123 666q0 164 47 294t130 220t196.5 137t246.5 47q123 0 219.5 -51t161.5 -117l-133 -147q-49 49 -109.5 78.5t-138.5 29.5q-84 0 -153.5 -33.5t-118.5 -96t-76.5 -153t-27.5 -202.5q0 -115 27.5 -206t77.5 -153.5t121 -96.5t157 -34q82 0 146.5 35t121.5 98l131 -145 q-82 -94 -184.5 -144.5t-229.5 -50.5q-129 0 -240.5 45.5t-194.5 133.5t-130 217t-47 295zM512 -342q78 10 126 31.5t48 62.5q0 33 -29 55.5t-94 38.5l82 160h143l-47 -100q51 -18 86 -52t35 -98q0 -49 -26.5 -83.5t-72.5 -59.5t-106.5 -39t-126.5 -18z" />
<glyph unicode="È" d="M213 0v1339h856v-200h-618v-342h524v-201h-524v-395h639v-201h-877zM366 1679h258l172 -237h-190z" />
<glyph unicode="É" d="M213 0v1339h856v-200h-618v-342h524v-201h-524v-395h639v-201h-877zM538 1442l172 237h258l-239 -237h-191z" />
<glyph unicode="Ê" d="M213 0v1339h856v-200h-618v-342h524v-201h-524v-395h639v-201h-877zM356 1442l197 237h229l197 -237h-185l-123 131h-8l-123 -131h-184z" />
<glyph unicode="Ë" d="M213 0v1339h856v-200h-618v-342h524v-201h-524v-395h639v-201h-877zM356 1571q0 53 34 88t87 35t88 -35t35 -88t-35 -87t-88 -34t-87 34t-34 87zM735 1571q0 53 35 88t88 35t87 -35t34 -88t-34 -87t-87 -34t-88 34t-35 87z" />
<glyph unicode="Ì" d="M172 0v201h324v938h-324v200h885v-200h-324v-938h324v-201h-885zM313 1679h258l172 -237h-190z" />
<glyph unicode="Í" d="M172 0v201h324v938h-324v200h885v-200h-324v-938h324v-201h-885zM485 1442l172 237h258l-239 -237h-191z" />
<glyph unicode="Î" d="M172 0v201h324v938h-324v200h885v-200h-324v-938h324v-201h-885zM303 1442l197 237h229l197 -237h-185l-123 131h-8l-123 -131h-184z" />
<glyph unicode="Ï" d="M172 0v201h324v938h-324v200h885v-200h-324v-938h324v-201h-885zM303 1571q0 53 34 88t87 35t88 -35t35 -88t-35 -87t-88 -34t-87 34t-34 87zM682 1571q0 53 35 88t88 35t87 -35t34 -88t-34 -87t-87 -34t-88 34t-35 87z" />
<glyph unicode="Ð" d="M18 637v111l148 10v581h346q295 0 463 -165.5t168 -497.5q0 -166 -43 -293t-123 -212t-194.5 -128t-258.5 -43h-358v637h-148zM403 193h95q193 0 297 117.5t104 365.5q0 246 -105.5 358.5t-295.5 112.5h-95v-389h256v-121h-256v-444z" />
<glyph unicode="Ñ" d="M152 0v1339h241l359 -753l125 -285h6q-8 104 -20.5 224t-12.5 233v581h227v-1339h-241l-359 756l-125 282h-6q8 -106 20.5 -223t12.5 -229v-586h-227zM303 1448q12 119 65.5 184.5t130.5 65.5q41 0 75 -15.5t63.5 -34t55.5 -34t52 -15.5q29 0 49.5 24t30.5 75h125 q-12 -117 -65.5 -183.5t-131.5 -66.5q-41 0 -74.5 15.5t-63.5 35t-55.5 34.5t-51.5 15q-29 0 -49.5 -24.5t-30.5 -75.5h-125z" />
<glyph unicode="Ò" d="M88 676q0 162 39 290t107.5 216t166 135t213.5 47q117 0 214.5 -47t166 -135t107.5 -216t39 -290q0 -164 -39 -293t-107.5 -220t-166 -139.5t-214.5 -48.5t-214 48.5t-165.5 139.5t-107.5 220t-39 293zM313 1679h258l172 -237h-190zM332 676q0 -113 19.5 -204t56 -155.5 t88.5 -99.5t118 -35q63 0 115.5 35t89.5 99.5t57.5 155.5t20.5 204q0 225 -77 353t-206 128t-205.5 -128t-76.5 -353z" />
<glyph unicode="Ó" d="M88 676q0 162 39 290t107.5 216t166 135t213.5 47q117 0 214.5 -47t166 -135t107.5 -216t39 -290q0 -164 -39 -293t-107.5 -220t-166 -139.5t-214.5 -48.5t-214 48.5t-165.5 139.5t-107.5 220t-39 293zM332 676q0 -113 19.5 -204t56 -155.5t88.5 -99.5t118 -35 q63 0 115.5 35t89.5 99.5t57.5 155.5t20.5 204q0 225 -77 353t-206 128t-205.5 -128t-76.5 -353zM485 1442l172 237h258l-239 -237h-191z" />
<glyph unicode="Ô" d="M88 676q0 162 39 290t107.5 216t166 135t213.5 47q117 0 214.5 -47t166 -135t107.5 -216t39 -290q0 -164 -39 -293t-107.5 -220t-166 -139.5t-214.5 -48.5t-214 48.5t-165.5 139.5t-107.5 220t-39 293zM303 1442l197 237h229l197 -237h-185l-123 131h-8l-123 -131h-184z M332 676q0 -113 19.5 -204t56 -155.5t88.5 -99.5t118 -35q63 0 115.5 35t89.5 99.5t57.5 155.5t20.5 204q0 225 -77 353t-206 128t-205.5 -128t-76.5 -353z" />
<glyph unicode="Õ" d="M88 676q0 162 39 290t107.5 216t166 135t213.5 47q117 0 214.5 -47t166 -135t107.5 -216t39 -290q0 -164 -39 -293t-107.5 -220t-166 -139.5t-214.5 -48.5t-214 48.5t-165.5 139.5t-107.5 220t-39 293zM291 1448q12 119 65.5 184.5t130.5 65.5q41 0 75 -15.5t63.5 -34 t55.5 -34t52 -15.5q29 0 49.5 24t30.5 75h125q-12 -117 -65.5 -183.5t-131.5 -66.5q-41 0 -74.5 15.5t-63.5 35t-55.5 34.5t-51.5 15q-29 0 -49.5 -24.5t-30.5 -75.5h-125zM332 676q0 -113 19.5 -204t56 -155.5t88.5 -99.5t118 -35q63 0 115.5 35t89.5 99.5t57.5 155.5 t20.5 204q0 225 -77 353t-206 128t-205.5 -128t-76.5 -353z" />
<glyph unicode="Ö" d="M88 676q0 162 39 290t107.5 216t166 135t213.5 47q117 0 214.5 -47t166 -135t107.5 -216t39 -290q0 -164 -39 -293t-107.5 -220t-166 -139.5t-214.5 -48.5t-214 48.5t-165.5 139.5t-107.5 220t-39 293zM303 1571q0 53 34 88t87 35t88 -35t35 -88t-35 -87t-88 -34t-87 34 t-34 87zM332 676q0 -113 19.5 -204t56 -155.5t88.5 -99.5t118 -35q63 0 115.5 35t89.5 99.5t57.5 155.5t20.5 204q0 225 -77 353t-206 128t-205.5 -128t-76.5 -353zM682 1571q0 53 35 88t88 35t87 -35t34 -88t-34 -87t-87 -34t-88 34t-35 87z" />
<glyph unicode="×" d="M190 367l304 307l-304 309l121 123l303 -311l304 311l120 -123l-303 -309l303 -307l-120 -123l-304 309l-303 -309z" />
<glyph unicode="Ø" d="M78 12l129 191q-57 88 -88 206.5t-31 266.5q0 162 39 290t107.5 216t166 135t213.5 47q182 0 312 -111l108 160l129 -86l-133 -194q53 -86 82 -201t29 -256q0 -164 -39 -293t-107.5 -220t-166 -139.5t-214.5 -48.5q-174 0 -303 107l-106 -156zM332 676q0 -141 30 -248 l435 637q-74 92 -183 92q-129 0 -205.5 -128t-76.5 -353zM440 270q70 -88 174 -88q63 0 115.5 35t89.5 99.5t57.5 155.5t20.5 204q0 127 -25 225z" />
<glyph unicode="Ù" d="M147 512v827h238v-845q0 -166 61.5 -239t169.5 -73q111 0 174.5 73t63.5 239v845h227v-827q0 -276 -120.5 -406.5t-344.5 -130.5q-219 0 -344 131.5t-125 405.5zM313 1679h258l172 -237h-190z" />
<glyph unicode="Ú" d="M147 512v827h238v-845q0 -166 61.5 -239t169.5 -73q111 0 174.5 73t63.5 239v845h227v-827q0 -276 -120.5 -406.5t-344.5 -130.5q-219 0 -344 131.5t-125 405.5zM485 1442l172 237h258l-239 -237h-191z" />
<glyph unicode="Û" d="M147 512v827h238v-845q0 -166 61.5 -239t169.5 -73q111 0 174.5 73t63.5 239v845h227v-827q0 -276 -120.5 -406.5t-344.5 -130.5q-219 0 -344 131.5t-125 405.5zM303 1442l197 237h229l197 -237h-185l-123 131h-8l-123 -131h-184z" />
<glyph unicode="Ü" d="M147 512v827h238v-845q0 -166 61.5 -239t169.5 -73q111 0 174.5 73t63.5 239v845h227v-827q0 -276 -120.5 -406.5t-344.5 -130.5q-219 0 -344 131.5t-125 405.5zM303 1571q0 53 34 88t87 35t88 -35t35 -88t-35 -87t-88 -34t-87 34t-34 87zM682 1571q0 53 35 88t88 35 t87 -35t34 -88t-34 -87t-87 -34t-88 34t-35 87z" />
<glyph unicode="Ý" d="M55 1339h252l168 -360q35 -78 69 -152.5t68 -156.5h9q37 82 71.5 157.5t69.5 153.5l166 358h246l-441 -874v-465h-237v465zM485 1442l172 237h258l-239 -237h-191z" />
<glyph unicode="Þ" d="M178 0v1339h238v-213h213q111 0 204 -21.5t160.5 -69.5t106.5 -127t39 -195q0 -113 -39 -195t-107.5 -135t-162 -78.5t-201.5 -25.5h-213v-279h-238zM416 469h192q152 0 223.5 60.5t71.5 183.5q0 125 -73.5 174t-221.5 49h-192v-467z" />
<glyph unicode="ß" d="M152 0v1038q0 94 28.5 174t83.5 137.5t137 89t189 31.5q90 0 158.5 -26.5t113.5 -71.5t67.5 -103.5t22.5 -121.5q0 -76 -23.5 -128t-53 -94t-53 -81t-23.5 -84q0 -41 26.5 -71t67.5 -55.5t88 -52t88 -64.5t67.5 -91t26.5 -131q0 -68 -23.5 -126t-68.5 -101t-108.5 -68 t-145.5 -25q-86 0 -154.5 21.5t-134.5 62.5l84 162q51 -35 99.5 -51t97.5 -16q63 0 98 35.5t35 86.5q0 49 -26.5 82t-67.5 60.5t-88 54.5t-88 60.5t-67.5 81t-26.5 116.5q0 66 23.5 113t53 91t53 90t23.5 108q0 66 -38 108.5t-105 42.5q-98 0 -149.5 -69.5t-51.5 -198.5 v-1016h-235z" />
<glyph unicode="à" d="M145 270q0 84 39 148.5t123 109.5t215 74t318 41q-6 86 -61.5 141.5t-178.5 55.5q-86 0 -169 -32t-161 -75l-86 158q92 53 211 96t248 43q211 0 321.5 -111.5t110.5 -322.5v-596h-192l-19 125h-6q-80 -61 -177 -105.5t-200 -44.5q-74 0 -135 21.5t-106 60.5t-70 93 t-25 120zM359 1497h246l184 -323h-176zM373 289q0 -68 52 -98.5t128 -30.5t146.5 33.5t140.5 89.5v213q-135 -10 -225.5 -27.5t-143.5 -44.5t-75.5 -60.5t-22.5 -74.5z" />
<glyph unicode="á" d="M145 270q0 84 39 148.5t123 109.5t215 74t318 41q-6 86 -61.5 141.5t-178.5 55.5q-86 0 -169 -32t-161 -75l-86 158q92 53 211 96t248 43q211 0 321.5 -111.5t110.5 -322.5v-596h-192l-19 125h-6q-80 -61 -177 -105.5t-200 -44.5q-74 0 -135 21.5t-106 60.5t-70 93 t-25 120zM373 289q0 -68 52 -98.5t128 -30.5t146.5 33.5t140.5 89.5v213q-135 -10 -225.5 -27.5t-143.5 -44.5t-75.5 -60.5t-22.5 -74.5zM486 1174l184 323h246l-254 -323h-176z" />
<glyph unicode="â" d="M145 270q0 84 39 148.5t123 109.5t215 74t318 41q-6 86 -61.5 141.5t-178.5 55.5q-86 0 -169 -32t-161 -75l-86 158q92 53 211 96t248 43q211 0 321.5 -111.5t110.5 -322.5v-596h-192l-19 125h-6q-80 -61 -177 -105.5t-200 -44.5q-74 0 -135 21.5t-106 60.5t-70 93 t-25 120zM334 1174l203 323h201l203 -323h-177l-123 198h-8l-123 -198h-176zM373 289q0 -68 52 -98.5t128 -30.5t146.5 33.5t140.5 89.5v213q-135 -10 -225.5 -27.5t-143.5 -44.5t-75.5 -60.5t-22.5 -74.5z" />
<glyph unicode="ã" d="M145 270q0 84 39 148.5t123 109.5t215 74t318 41q-6 86 -61.5 141.5t-178.5 55.5q-86 0 -169 -32t-161 -75l-86 158q92 53 211 96t248 43q211 0 321.5 -111.5t110.5 -322.5v-596h-192l-19 125h-6q-80 -61 -177 -105.5t-200 -44.5q-74 0 -135 21.5t-106 60.5t-70 93 t-25 120zM318 1182q14 123 67.5 191.5t141.5 68.5q37 0 67.5 -17.5t57 -38t50 -38t46.5 -17.5q33 0 53.5 25.5t30.5 85.5h125q-14 -123 -67.5 -191.5t-141.5 -68.5q-37 0 -67.5 17.5t-57.5 37.5t-49.5 37.5t-46.5 17.5q-33 0 -53.5 -25.5t-30.5 -84.5h-125zM373 289 q0 -68 52 -98.5t128 -30.5t146.5 33.5t140.5 89.5v213q-135 -10 -225.5 -27.5t-143.5 -44.5t-75.5 -60.5t-22.5 -74.5z" />
<glyph unicode="ä" d="M145 270q0 84 39 148.5t123 109.5t215 74t318 41q-6 86 -61.5 141.5t-178.5 55.5q-86 0 -169 -32t-161 -75l-86 158q92 53 211 96t248 43q211 0 321.5 -111.5t110.5 -322.5v-596h-192l-19 125h-6q-80 -61 -177 -105.5t-200 -44.5q-74 0 -135 21.5t-106 60.5t-70 93 t-25 120zM291 1323q0 59 38 99t97 40t98.5 -40t39.5 -99t-39.5 -98t-98.5 -39t-97 39t-38 98zM373 289q0 -68 52 -98.5t128 -30.5t146.5 33.5t140.5 89.5v213q-135 -10 -225.5 -27.5t-143.5 -44.5t-75.5 -60.5t-22.5 -74.5zM711 1323q0 59 39 99t98 40t97.5 -40t38.5 -99 t-38.5 -98t-97.5 -39t-98 39t-39 98z" />
<glyph unicode="å" d="M145 270q0 84 39 148.5t123 109.5t215 74t318 41q-6 86 -61.5 141.5t-178.5 55.5q-86 0 -169 -32t-161 -75l-86 158q92 53 211 96t248 43q211 0 321.5 -111.5t110.5 -322.5v-596h-192l-19 125h-6q-80 -61 -177 -105.5t-200 -44.5q-74 0 -135 21.5t-106 60.5t-70 93 t-25 120zM373 289q0 -68 52 -98.5t128 -30.5t146.5 33.5t140.5 89.5v213q-135 -10 -225.5 -27.5t-143.5 -44.5t-75.5 -60.5t-22.5 -74.5zM426 1315q0 92 58.5 146.5t152.5 54.5t152.5 -54.5t58.5 -146.5q0 -90 -58.5 -144.5t-152.5 -54.5t-152.5 54.5t-58.5 144.5zM547 1315 q0 -47 25.5 -76t64.5 -29t65 29t26 76t-26 76.5t-65 29.5t-64.5 -29.5t-25.5 -76.5z" />
<glyph unicode="æ" d="M33 274q0 154 120.5 241t364.5 128q-4 92 -36.5 145.5t-110.5 53.5q-47 0 -108.5 -22.5t-112.5 -55.5l-87 158q74 45 156 76.5t172 31.5q94 0 152.5 -48t89.5 -126q51 86 116.5 130t159.5 44q76 0 134.5 -38t97.5 -102.5t58.5 -150.5t19.5 -182q0 -35 -3.5 -65.5 t-7.5 -53.5h-497q12 -129 66 -202.5t153 -73.5q49 0 92 15.5t84 43.5l82 -153q-55 -41 -126 -67t-140 -26q-111 0 -184.5 46.5t-123.5 125.5q-84 -86 -157.5 -129t-149.5 -43q-129 0 -201.5 83t-72.5 216zM248 291q0 -66 32.5 -98.5t88.5 -32.5q41 0 90 27.5t90 78.5 q-14 41 -21.5 88t-9.5 95l-2 47q-143 -27 -205.5 -78t-62.5 -127zM709 588h319q0 53 -7 100t-23.5 83t-43 56.5t-67.5 20.5q-72 0 -120 -65.5t-58 -194.5z" />
<glyph unicode="ç" d="M150 502q0 125 46 223t123.5 165.5t182 103.5t219.5 36q123 0 216 -41t157 -100l-113 -148q-59 47 -119.5 72t-128.5 25q-152 0 -246 -91.5t-94 -244.5q0 -76 24.5 -137.5t68.5 -105.5t105.5 -67.5t135.5 -23.5q86 0 156.5 32.5t130.5 78.5l98 -152q-86 -76 -192.5 -114 t-214.5 -38q-119 0 -220.5 35t-176.5 102.5t-116.5 166t-41.5 223.5zM506 -342q78 10 127 31.5t49 62.5q0 33 -29.5 55.5t-95.5 38.5l82 160h143l-47 -100q51 -18 86 -52t35 -98q0 -49 -26.5 -83.5t-72.5 -59.5t-105.5 -39t-125.5 -18z" />
<glyph unicode="è" d="M127 502q0 123 44 221t115.5 166.5t165 104.5t191.5 36q113 0 200 -36t146.5 -98t90 -150.5t30.5 -192.5q0 -35 -3 -65.5t-7 -49.5h-731q12 -135 106 -207.5t232 -72.5q78 0 145.5 21.5t136.5 60.5l80 -148q-82 -51 -182.5 -84t-210.5 -33q-115 0 -215.5 36t-174 103.5 t-116.5 165t-43 222.5zM365 1497h246l184 -323h-176zM367 598h526q0 117 -61.5 183.5t-182.5 66.5q-102 0 -182 -63.5t-100 -186.5z" />
<glyph unicode="é" d="M127 502q0 123 44 221t115.5 166.5t165 104.5t191.5 36q113 0 200 -36t146.5 -98t90 -150.5t30.5 -192.5q0 -35 -3 -65.5t-7 -49.5h-731q12 -135 106 -207.5t232 -72.5q78 0 145.5 21.5t136.5 60.5l80 -148q-82 -51 -182.5 -84t-210.5 -33q-115 0 -215.5 36t-174 103.5 t-116.5 165t-43 222.5zM367 598h526q0 117 -61.5 183.5t-182.5 66.5q-102 0 -182 -63.5t-100 -186.5zM492 1174l184 323h246l-254 -323h-176z" />
<glyph unicode="ê" d="M127 502q0 123 44 221t115.5 166.5t165 104.5t191.5 36q113 0 200 -36t146.5 -98t90 -150.5t30.5 -192.5q0 -35 -3 -65.5t-7 -49.5h-731q12 -135 106 -207.5t232 -72.5q78 0 145.5 21.5t136.5 60.5l80 -148q-82 -51 -182.5 -84t-210.5 -33q-115 0 -215.5 36t-174 103.5 t-116.5 165t-43 222.5zM340 1174l203 323h201l203 -323h-177l-123 198h-8l-123 -198h-176zM367 598h526q0 117 -61.5 183.5t-182.5 66.5q-102 0 -182 -63.5t-100 -186.5z" />
<glyph unicode="ë" d="M127 502q0 123 44 221t115.5 166.5t165 104.5t191.5 36q113 0 200 -36t146.5 -98t90 -150.5t30.5 -192.5q0 -35 -3 -65.5t-7 -49.5h-731q12 -135 106 -207.5t232 -72.5q78 0 145.5 21.5t136.5 60.5l80 -148q-82 -51 -182.5 -84t-210.5 -33q-115 0 -215.5 36t-174 103.5 t-116.5 165t-43 222.5zM295 1323q0 59 38 99t97 40t98.5 -40t39.5 -99t-39.5 -98t-98.5 -39t-97 39t-38 98zM367 598h526q0 117 -61.5 183.5t-182.5 66.5q-102 0 -182 -63.5t-100 -186.5zM715 1323q0 59 39 99t98 40t97.5 -40t38.5 -99t-38.5 -98t-97.5 -39t-98 39t-39 98z " />
<glyph unicode="ì" d="M172 819v187h672v-1006h-236v819h-436zM430 1497h246l184 -323h-176z" />
<glyph unicode="í" d="M172 819v187h672v-1006h-236v819h-436zM557 1174l184 323h246l-254 -323h-176z" />
<glyph unicode="î" d="M172 819v187h672v-1006h-236v819h-436zM405 1174l203 323h201l203 -323h-177l-123 198h-8l-123 -198h-176z" />
<glyph unicode="ï" d="M172 819v187h672v-1006h-236v819h-436zM362 1323q0 59 38 99t97 40t98.5 -40t39.5 -99t-39.5 -98t-98.5 -39t-97 39t-38 98zM782 1323q0 59 39 99t98 40t97.5 -40t38.5 -99t-38.5 -98t-97.5 -39t-98 39t-39 98z" />
<glyph unicode="ð" d="M109 461q0 106 35.5 192t99 146.5t146.5 92.5t179 32q84 0 161 -34t130 -104q-29 117 -84 206t-137 161l-307 -156l-66 109l260 133q-49 33 -103 62.5t-116 58.5l105 145q80 -37 155.5 -79t143.5 -93l278 142l64 -111l-234 -119q66 -61 120 -135t92 -162t59.5 -190.5 t21.5 -220.5q0 -123 -35 -226.5t-100.5 -178.5t-158.5 -116t-212 -41q-100 0 -190 34t-159 96.5t-108.5 152.5t-39.5 203zM328 461q0 -68 22.5 -122t61.5 -92t91 -58.5t111 -20.5q135 0 205 95t70 261v42.5t-2 39.5q-63 78 -133 108.5t-142 30.5q-137 0 -210.5 -75.5 t-73.5 -208.5z" />
<glyph unicode="ñ" d="M168 0v1006h194l19 -152h8q72 72 159 124t204 52q174 0 256 -105.5t82 -301.5v-623h-238v592q0 123 -42 179t-146 56q-74 0 -131.5 -34.5t-126.5 -106.5v-686h-238zM328 1182q14 123 67.5 191.5t141.5 68.5q37 0 67.5 -17.5t57 -38t50 -38t46.5 -17.5q33 0 53.5 25.5 t30.5 85.5h125q-14 -123 -67.5 -191.5t-141.5 -68.5q-37 0 -67.5 17.5t-57.5 37.5t-49.5 37.5t-46.5 17.5q-33 0 -53.5 -25.5t-30.5 -84.5h-125z" />
<glyph unicode="ò" d="M109 502q0 125 42 223t111.5 165.5t161.5 103.5t190 36t190.5 -36t162 -103.5t111.5 -165.5t42 -223t-42 -223.5t-111.5 -166t-161.5 -102.5t-191 -35q-98 0 -190 35t-161.5 102.5t-111.5 166t-42 223.5zM336 1497h246l184 -323h-176zM352 502q0 -152 70 -243t192 -91 q123 0 193 91t70 243q0 154 -70 245t-193 91t-192.5 -91t-69.5 -245z" />
<glyph unicode="ó" d="M109 502q0 125 42 223t111.5 165.5t161.5 103.5t190 36t190.5 -36t162 -103.5t111.5 -165.5t42 -223t-42 -223.5t-111.5 -166t-161.5 -102.5t-191 -35q-98 0 -190 35t-161.5 102.5t-111.5 166t-42 223.5zM352 502q0 -152 70 -243t192 -91q123 0 193 91t70 243 q0 154 -70 245t-193 91t-192.5 -91t-69.5 -245zM463 1174l184 323h246l-254 -323h-176z" />
<glyph unicode="ô" d="M109 502q0 125 42 223t111.5 165.5t161.5 103.5t190 36t190.5 -36t162 -103.5t111.5 -165.5t42 -223t-42 -223.5t-111.5 -166t-161.5 -102.5t-191 -35q-98 0 -190 35t-161.5 102.5t-111.5 166t-42 223.5zM311 1174l203 323h201l203 -323h-177l-123 198h-8l-123 -198h-176 zM352 502q0 -152 70 -243t192 -91q123 0 193 91t70 243q0 154 -70 245t-193 91t-192.5 -91t-69.5 -245z" />
<glyph unicode="õ" d="M109 502q0 125 42 223t111.5 165.5t161.5 103.5t190 36t190.5 -36t162 -103.5t111.5 -165.5t42 -223t-42 -223.5t-111.5 -166t-161.5 -102.5t-191 -35q-98 0 -190 35t-161.5 102.5t-111.5 166t-42 223.5zM295 1182q14 123 67.5 191.5t141.5 68.5q37 0 67.5 -17.5t57 -38 t50 -38t46.5 -17.5q33 0 53.5 25.5t30.5 85.5h125q-14 -123 -67.5 -191.5t-141.5 -68.5q-37 0 -67.5 17.5t-57.5 37.5t-49.5 37.5t-46.5 17.5q-33 0 -53.5 -25.5t-30.5 -84.5h-125zM352 502q0 -152 70 -243t192 -91q123 0 193 91t70 243q0 154 -70 245t-193 91t-192.5 -91 t-69.5 -245z" />
<glyph unicode="ö" d="M109 502q0 125 42 223t111.5 165.5t161.5 103.5t190 36t190.5 -36t162 -103.5t111.5 -165.5t42 -223t-42 -223.5t-111.5 -166t-161.5 -102.5t-191 -35q-98 0 -190 35t-161.5 102.5t-111.5 166t-42 223.5zM268 1323q0 59 38 99t97 40t98.5 -40t39.5 -99t-39.5 -98 t-98.5 -39t-97 39t-38 98zM352 502q0 -152 70 -243t192 -91q123 0 193 91t70 243q0 154 -70 245t-193 91t-192.5 -91t-69.5 -245zM688 1323q0 59 39 99t98 40t97.5 -40t38.5 -99t-38.5 -98t-97.5 -39t-98 39t-39 98z" />
<glyph unicode="÷" d="M160 590v172h909v-172h-909zM475 307q0 59 40 96t99 37t99.5 -36.5t40.5 -96.5q0 -59 -40 -97t-100 -38q-59 0 -99 38t-40 97zM475 1044q0 59 40 96.5t99 37.5t99.5 -37t40.5 -97q0 -59 -40.5 -97t-99.5 -38t-99 38t-40 97z" />
<glyph unicode="ø" d="M109 502q0 125 42 223t111.5 165.5t161.5 103.5t190 36q162 0 291 -88l99 115l94 -74l-103 -121q57 -66 91 -157t34 -203q0 -125 -42 -223.5t-111.5 -166t-161.5 -102.5t-191 -35q-80 0 -154.5 22.5t-137.5 68.5l-97 -115l-94 74l102 120q-59 66 -91.5 156t-32.5 201z M352 502q0 -102 31 -178l391 462q-31 29 -71.5 44.5t-88.5 15.5q-61 0 -110 -25.5t-83 -70.5t-51.5 -108.5t-17.5 -139.5zM453 221q68 -61 161 -61q61 0 109.5 25.5t82.5 70.5t52.5 107.5t18.5 138.5q0 104 -33 182z" />
<glyph unicode="ù" d="M143 383v623h236v-592q0 -123 42 -179.5t146 -56.5q72 0 128.5 33t123.5 117v678h236v-1006h-193l-18 160h-8q-72 -84 -158 -134.5t-199 -50.5q-176 0 -256 105.5t-80 302.5zM328 1497h246l184 -323h-176z" />
<glyph unicode="ú" d="M143 383v623h236v-592q0 -123 42 -179.5t146 -56.5q72 0 128.5 33t123.5 117v678h236v-1006h-193l-18 160h-8q-72 -84 -158 -134.5t-199 -50.5q-176 0 -256 105.5t-80 302.5zM455 1174l184 323h246l-254 -323h-176z" />
<glyph unicode="û" d="M143 383v623h236v-592q0 -123 42 -179.5t146 -56.5q72 0 128.5 33t123.5 117v678h236v-1006h-193l-18 160h-8q-72 -84 -158 -134.5t-199 -50.5q-176 0 -256 105.5t-80 302.5zM303 1174l203 323h201l203 -323h-177l-123 198h-8l-123 -198h-176z" />
<glyph unicode="ü" d="M143 383v623h236v-592q0 -123 42 -179.5t146 -56.5q72 0 128.5 33t123.5 117v678h236v-1006h-193l-18 160h-8q-72 -84 -158 -134.5t-199 -50.5q-176 0 -256 105.5t-80 302.5zM260 1323q0 59 38 99t97 40t98.5 -40t39.5 -99t-39.5 -98t-98.5 -39t-97 39t-38 98zM680 1323 q0 59 39 99t98 40t97.5 -40t38.5 -99t-38.5 -98t-97.5 -39t-98 39t-39 98z" />
<glyph unicode="ý" d="M82 1006h235l207 -494q29 -68 55.5 -143.5t55.5 -151.5h8q23 72 49.5 147.5t50.5 147.5l183 494h223l-420 -1051q-33 -86 -73 -153.5t-93 -115.5t-122.5 -74t-159.5 -26q-41 0 -78 5.5t-66 15.5l45 184q20 -6 41 -10t43 -4q96 0 153.5 48t88.5 124l24 63zM475 1174 l184 323h246l-254 -323h-176z" />
<glyph unicode="þ" d="M168 -397v1843h238v-383l-9 -166q68 61 148 97t164 36q96 0 172.5 -36t130 -103.5t81 -161.5t27.5 -211q0 -129 -38 -230.5t-100 -170t-142 -105.5t-166 -37q-74 0 -144.5 33t-130.5 86l7 -176v-315h-238zM406 262q55 -51 113.5 -71.5t105.5 -20.5q104 0 178 87t74 259 q0 152 -55.5 236t-178.5 84q-57 0 -116.5 -29t-120.5 -92v-453z" />
<glyph unicode="ÿ" d="M82 1006h235l207 -494q29 -68 55.5 -143.5t55.5 -151.5h8q23 72 49.5 147.5t50.5 147.5l183 494h223l-420 -1051q-33 -86 -73 -153.5t-93 -115.5t-122.5 -74t-159.5 -26q-41 0 -78 5.5t-66 15.5l45 184q20 -6 41 -10t43 -4q96 0 153.5 48t88.5 124l24 63zM280 1323 q0 59 38 99t97 40t98.5 -40t39.5 -99t-39.5 -98t-98.5 -39t-97 39t-38 98zM700 1323q0 59 39 99t98 40t97.5 -40t38.5 -99t-38.5 -98t-97.5 -39t-98 39t-39 98z" />
<glyph unicode="Œ" d="M53 676q0 166 40 290t113 206.5t175 124.5t227 42h586v-198h-363v-344h275v-201h-275v-395h383v-201h-616q-123 0 -223.5 43t-172 128t-110.5 212t-39 293zM297 676q0 -250 72.5 -366.5t212.5 -116.5h36v954h-36q-139 0 -212 -110.5t-73 -360.5z" />
<glyph unicode="œ" d="M23 504q0 125 28.5 223t79.5 166t118.5 102.5t145.5 34.5q92 0 157.5 -54t104.5 -153q41 98 106.5 152.5t151.5 54.5q74 0 130.5 -38t94.5 -102.5t57.5 -150.5t19.5 -182q0 -35 -2.5 -60.5t-6.5 -58.5h-475q12 -129 65.5 -202.5t145.5 -73.5q47 0 84 15.5t78 43.5 l82 -153q-53 -41 -120 -67t-134 -26q-88 0 -160 49.5t-115 143.5q-45 -94 -109.5 -143.5t-158.5 -49.5q-78 0 -145.5 35t-116.5 102.5t-77.5 166t-28.5 225.5zM238 504q0 -154 42 -246t126 -92q74 0 115.5 92t41.5 246q0 152 -42 244t-115 92q-84 0 -126 -92t-42 -244z M731 588h297q0 51 -6 99t-21.5 83t-41 56.5t-62.5 21.5q-72 0 -114 -67.5t-52 -192.5z" />
<glyph unicode="Ÿ" d="M55 1339h252l168 -360q35 -78 69 -152.5t68 -156.5h9q37 82 71.5 157.5t69.5 153.5l166 358h246l-441 -874v-465h-237v465zM303 1571q0 53 34 88t87 35t88 -35t35 -88t-35 -87t-88 -34t-87 34t-34 87zM682 1571q0 53 35 88t88 35t87 -35t34 -88t-34 -87t-87 -34t-88 34 t-35 87z" />
<glyph unicode="ˆ" d="M311 1174l203 323h201l203 -323h-177l-123 198h-8l-123 -198h-176z" />
<glyph unicode="˜" d="M295 1182q14 123 67.5 191.5t141.5 68.5q37 0 67.5 -17.5t57 -38t50 -38t46.5 -17.5q33 0 53.5 25.5t30.5 85.5h125q-14 -123 -67.5 -191.5t-141.5 -68.5q-37 0 -67.5 17.5t-57.5 37.5t-49.5 37.5t-46.5 17.5q-33 0 -53.5 -25.5t-30.5 -84.5h-125z" />
<glyph unicode=" " horiz-adv-x="905" />
<glyph unicode=" " horiz-adv-x="1810" />
<glyph unicode=" " horiz-adv-x="905" />
<glyph unicode=" " horiz-adv-x="1810" />
<glyph unicode=" " horiz-adv-x="603" />
<glyph unicode=" " horiz-adv-x="452" />
<glyph unicode=" " horiz-adv-x="301" />
<glyph unicode=" " horiz-adv-x="301" />
<glyph unicode=" " horiz-adv-x="226" />
<glyph unicode=" " horiz-adv-x="362" />
<glyph unicode=" " horiz-adv-x="100" />
<glyph unicode="‐" d="M160 590v172h909v-172h-909z" />
<glyph unicode="‑" d="M160 590v172h909v-172h-909z" />
<glyph unicode="‒" d="M160 590v172h909v-172h-909z" />
<glyph unicode="–" d="M164 424v188h901v-188h-901z" />
<glyph unicode="—" d="M41 424v188h1147v-188h-1147z" />
<glyph unicode="‘" d="M401 942q0 160 84 285t244 203l68 -130q-100 -53 -156.5 -127.5t-62.5 -177.5q18 8 45 9q74 0 119.5 -46.5t45.5 -119.5q0 -84 -48 -134.5t-126 -50.5q-98 0 -155.5 77t-57.5 212z" />
<glyph unicode="’" d="M430 782q100 53 156.5 128t64.5 177q-23 -8 -49 -8q-72 0 -119 46t-47 120q0 84 49.5 134.5t124.5 50.5q98 0 155.5 -77t57.5 -212q0 -160 -83 -285t-242 -203z" />
<glyph unicode="‚" d="M430 -338q100 53 156.5 128t64.5 177q-23 -8 -49 -8q-72 0 -119 46t-47 120q0 84 49.5 134.5t124.5 50.5q98 0 155.5 -77t57.5 -212q0 -160 -83 -285t-242 -203z" />
<glyph unicode="“" d="M149 942q0 160 84 285t244 203l68 -130q-100 -53 -156.5 -127.5t-62.5 -177.5q18 8 45 9q74 0 119.5 -46.5t45.5 -119.5q0 -84 -48 -134.5t-126 -50.5q-98 0 -155.5 77t-57.5 212zM655 942q0 160 84 285t244 203l68 -130q-100 -53 -156.5 -127.5t-62.5 -177.5q18 8 45 9 q74 0 119.5 -46.5t45.5 -119.5q0 -84 -48 -134.5t-126 -50.5q-98 0 -155.5 77t-57.5 212z" />
<glyph unicode="”" d="M178 782q100 53 156.5 128t64.5 177q-23 -8 -49 -8q-72 0 -119 46t-47 120q0 84 49.5 134.5t124.5 50.5q98 0 155.5 -77t57.5 -212q0 -160 -83 -285t-242 -203zM684 782q100 53 156.5 128t64.5 177q-23 -8 -49 -8q-72 0 -119 46t-47 120q0 84 49.5 134.5t124.5 50.5 q98 0 155.5 -77t57.5 -212q0 -160 -83 -285t-242 -203z" />
<glyph unicode="„" d="M178 -338q100 53 156.5 128t64.5 177q-23 -8 -49 -8q-72 0 -119 46t-47 120q0 84 49.5 134.5t124.5 50.5q98 0 155.5 -77t57.5 -212q0 -160 -83 -285t-242 -203zM684 -338q100 53 156.5 128t64.5 177q-23 -8 -49 -8q-72 0 -119 46t-47 120q0 84 49.5 134.5t124.5 50.5 q98 0 155.5 -77t57.5 -212q0 -160 -83 -285t-242 -203z" />
<glyph unicode="•" d="M295 537q0 70 25.5 127t68.5 99t101.5 65.5t123.5 23.5q66 0 124.5 -23.5t101.5 -65.5t68.5 -99.5t25.5 -126.5q0 -70 -25.5 -127.5t-68.5 -99.5t-101.5 -65.5t-124.5 -23.5t-124 23.5t-101 65.5t-68.5 99.5t-25.5 127.5z" />
<glyph unicode="…" d="M31 152q0 76 47 126t119 50t118.5 -50.5t46.5 -125.5q0 -78 -47 -127.5t-118 -49.5q-72 0 -119 49.5t-47 127.5zM449 152q0 76 47 126t118 50q72 0 119 -50.5t47 -125.5q0 -78 -47 -127.5t-119 -49.5t-118.5 49.5t-46.5 127.5zM866 152q0 76 47.5 126t118.5 50 q72 0 119 -50.5t47 -125.5q0 -78 -47 -127.5t-119 -49.5t-119 49.5t-47 127.5z" />
<glyph unicode=" " horiz-adv-x="362" />
<glyph unicode="‹" d="M381 399v234l332 321l104 -94l-260 -344l260 -346l-104 -92z" />
<glyph unicode="›" d="M412 170l260 346l-260 344l104 94l332 -321v-234l-332 -321z" />
<glyph unicode=" " horiz-adv-x="452" />
<glyph unicode="€" d="M104 461v114l117 9q-2 16 -2 31.5v31.5v30.5t2 29.5h-117v114l132 8q20 119 70 211.5t123 156t167 97t203 33.5q104 0 198.5 -43t161.5 -117l-133 -129q-51 47 -107.5 76t-127.5 29q-129 0 -208 -82t-108 -230h559v-124h-573q-2 -12 -2 -24.5v-27.5v-35.5t2 -33.5h491 v-125h-475q33 -143 111 -222t198 -79q80 0 141.5 32.5t116.5 96.5l134 -123q-82 -92 -180.5 -141.5t-223.5 -49.5q-102 0 -191 32t-159 93.5t-119 152.5t-69 208h-132z" />
<glyph unicode="™" d="M-18 1239v145h518v-145h-181v-494h-159v494h-178zM580 745v639h178l82 -213l49 -155h8l47 155l80 213h178v-639h-141v238l16 225h-8l-127 -342h-102l-127 342h-8l18 -225v-238h-143z" />
<glyph unicode="◼" horiz-adv-x="1003" d="M0 0v1004h1004v-1004h-1004z" />
<glyph unicode="fi" d="M59 819v176l136 11v104q0 78 18 144.5t59 114.5t103.5 74.5t152.5 26.5q55 0 102.5 -10t82.5 -24l-45 -176q-55 25 -109 24q-129 0 -129 -170v-108h197v-187h-197v-819h-235v819h-136zM782 1341q0 72 50 117t124 45q76 0 125 -45t49 -117t-49 -115.5t-125 -43.5 q-74 0 -124 44t-50 115zM840 0v1006h237v-1006h-237z" />
<glyph unicode="fl" d="M70 819v176l135 11v104q0 78 18.5 144.5t58.5 114.5t102 74.5t153 26.5q55 0 102 -10t82 -24l-45 -176q-53 25 -109 24q-127 0 -127 -170v-108h197v-187h-197v-819h-235v819h-135zM813 258v1188h236v-1200q0 -43 16 -60.5t35 -17.5h15t22 4l28 -176q-23 -10 -52.5 -15.5 t-72.5 -5.5q-125 0 -176 76t-51 207z" />
</font>
</defs></svg>
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff