first commit
This commit is contained in:
@@ -0,0 +1,11 @@
|
||||
import { LRLanguage } from '@codemirror/language'
|
||||
import { parser } from '../../lezer-bibtex/bibtex.mjs'
|
||||
import { bibtexEntryCompletions } from './completions/snippets'
|
||||
|
||||
// CodeMirror language definition for BibTeX: wires the Lezer-generated
// parser to the entry-snippet autocompletion source.
export const BibTeXLanguage = LRLanguage.define({
  name: 'bibtex',
  parser,
  languageData: {
    // offered when editing inside a BibTeX document
    autocomplete: bibtexEntryCompletions,
  },
})
|
||||
@@ -0,0 +1,95 @@
|
||||
import { CompletionContext, snippet } from '@codemirror/autocomplete'
|
||||
|
||||
// A BibTeX entry type together with the fields a well-formed entry of that
// type must provide (used to build completion snippets).
type Environment = {
  name: string
  requiredAttributes: string[]
}

// Standard BibTeX entry types and their required fields.
// NOTE(review): this table is similar to, but not identical with, the
// linter's bibEntryValidationRules (e.g. `misc` requires key/note here) —
// confirm the differences are intentional.
const environments: Environment[] = [
  {
    name: 'article',
    requiredAttributes: ['author', 'title', 'journal', 'year'],
  },
  {
    name: 'book',
    requiredAttributes: ['author', 'title', 'publisher', 'year'],
  },
  {
    name: 'booklet',
    requiredAttributes: ['key', 'title'],
  },
  {
    name: 'conference',
    requiredAttributes: ['author', 'booktitle', 'title', 'year'],
  },
  {
    name: 'inbook',
    requiredAttributes: ['author', 'title', 'publisher', 'year', 'chapter'],
  },
  {
    name: 'incollection',
    requiredAttributes: ['author', 'title', 'booktitle', 'publisher', 'year'],
  },
  {
    name: 'inproceedings',
    requiredAttributes: ['author', 'title', 'booktitle', 'year'],
  },
  {
    name: 'manual',
    requiredAttributes: ['key', 'title'],
  },
  {
    name: 'mastersthesis',
    requiredAttributes: ['author', 'title', 'school', 'year'],
  },
  {
    name: 'misc',
    requiredAttributes: ['key', 'note'],
  },
  {
    name: 'phdthesis',
    requiredAttributes: ['author', 'title', 'school', 'year'],
  },
  {
    name: 'proceedings',
    requiredAttributes: ['key', 'title', 'year'],
  },
  {
    name: 'techreport',
    requiredAttributes: ['author', 'title', 'institution', 'year'],
  },
  {
    name: 'unpublished',
    requiredAttributes: ['author', 'title', 'note'],
  },
]
|
||||
|
||||
const prepareSnippet = (environment: Environment) => {
|
||||
return `@${
|
||||
environment.name
|
||||
}{#{citation-key},${environment.requiredAttributes.map(
|
||||
attribute => `
|
||||
${attribute} = #{}`
|
||||
)}
|
||||
}`
|
||||
}
|
||||
|
||||
export function bibtexEntryCompletions(context: CompletionContext) {
|
||||
const word = context.matchBefore(/@\w*/)
|
||||
if (word?.from === word?.to && !context.explicit) return null
|
||||
return {
|
||||
from: word?.from ?? context.pos,
|
||||
options: [
|
||||
...environments.map(env => ({
|
||||
label: `@${env.name}`,
|
||||
type: 'snippet',
|
||||
apply: snippet(prepareSnippet(env)),
|
||||
})),
|
||||
{
|
||||
label: '@string',
|
||||
type: 'snippet',
|
||||
apply: snippet('@string{#{string-key} = #{}}'),
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
import { LanguageSupport } from '@codemirror/language'
|
||||
import { BibTeXLanguage } from './bibtex-language'
|
||||
import { bibtexLinter } from './linting'
|
||||
|
||||
export const bibtex = () => {
|
||||
return new LanguageSupport(BibTeXLanguage, [bibtexLinter()])
|
||||
}
|
||||
@@ -0,0 +1,292 @@
|
||||
import { syntaxTree } from '@codemirror/language'
|
||||
import { Diagnostic, LintSource } from '@codemirror/lint'
|
||||
import {
|
||||
Declaration,
|
||||
EntryName,
|
||||
EntryTypeName,
|
||||
FieldName,
|
||||
Other,
|
||||
} from '../../lezer-bibtex/bibtex.terms.mjs'
|
||||
import { SyntaxNodeRef } from '@lezer/common'
|
||||
import { EditorState } from '@codemirror/state'
|
||||
import { createLinter } from '../../extensions/linting'
|
||||
|
||||
// Validation rule for one BibTeX entry type. Each element of
// requiredAttributes is either a single field name or a list of alternatives
// (any one of which satisfies the requirement). `biblatex` maps a BibTeX
// field name to its biblatex equivalent, which also satisfies it.
type BibEntryValidationRule = {
  requiredAttributes: (string | string[])[]
  biblatex?: Record<string, string>
}

// Linter extension that flags BibTeX entries missing required fields
export const bibtexLinter = () => createLinter(bibtexLintSource, { delay: 100 })
|
||||
|
||||
// Lint source: walks the syntax tree and reports entries with missing
// required fields, honouring %%novalidate directives in the document.
export const bibtexLintSource: LintSource = view => {
  const tree = syntaxTree(view.state)

  const diagnostics: Diagnostic[] = []

  // Linting can be temporarily disabled by a %%begin novalidate directive.
  // It can be re-enabled by a %%end novalidate directive
  let lintingCurrentlyDisabled = false

  // Linting is completely disabled by a %%novalidate so will return no linter
  // errors
  let fileLintingDisabled = false

  tree.iterate({
    enter(node) {
      if (fileLintingDisabled) {
        // the whole file is opted out — stop descending
        return false
      }
      if (node.type.is(Other)) {
        // Content between declarations. Can be a linter directive
        const content = view.state.sliceDoc(node.from, node.to).trim()
        if (content === '%%novalidate') {
          fileLintingDisabled = true
        } else if (content === '%%begin novalidate') {
          lintingCurrentlyDisabled = true
        } else if (content === '%%end novalidate') {
          lintingCurrentlyDisabled = false
        }
      }
      if (lintingCurrentlyDisabled) {
        return false
      }
      if (node.type.is(Declaration)) {
        // validate the entry; no need to descend into its children
        diagnostics.push(...checkRequiredFields(node, view.state))
        return false
      }
    },
  })

  if (fileLintingDisabled) {
    return []
  } else {
    return diagnostics
  }
}
|
||||
|
||||
// Required fields per entry type, keyed by lower-cased entry type name.
// Inner string arrays are alternatives (e.g. book needs author OR editor);
// `biblatex` lists field aliases accepted in place of the BibTeX name.
const bibEntryValidationRules = new Map<string, BibEntryValidationRule>([
  [
    'article',
    {
      requiredAttributes: ['author', 'title', 'journal', 'year'],
      biblatex: {
        journal: 'journaltitle',
        year: 'date',
      },
    },
  ],
  [
    'book',
    {
      requiredAttributes: [['author', 'editor'], 'title', 'publisher', 'year'],
      biblatex: {
        year: 'date',
      },
    },
  ],
  [
    'booklet',
    {
      requiredAttributes: [['author', 'key'], 'title'],
    },
  ],
  [
    'conference',
    {
      requiredAttributes: ['author', 'title', 'year', 'booktitle'],
      biblatex: {
        year: 'date',
      },
    },
  ],
  [
    'inbook',
    {
      requiredAttributes: ['author', 'title', 'publisher', 'year'],
      biblatex: {
        year: 'date',
      },
    },
  ],
  [
    'incollection',
    {
      requiredAttributes: ['author', 'title', 'booktitle', 'publisher', 'year'],
      biblatex: {
        year: 'date',
      },
    },
  ],
  [
    'inproceedings',
    {
      requiredAttributes: ['author', 'title', 'booktitle', 'year'],
      biblatex: {
        year: 'date',
      },
    },
  ],
  [
    'manual',
    {
      requiredAttributes: [['author', 'key', 'organization'], 'title'],
    },
  ],
  [
    'mastersthesis',
    {
      requiredAttributes: ['author', 'title', 'school', 'year'],
      biblatex: {
        year: 'date',
      },
    },
  ],
  [
    'misc',
    {
      requiredAttributes: [['author', 'key']],
    },
  ],
  [
    'phdthesis',
    {
      requiredAttributes: ['author', 'title', 'school', 'year'],
      biblatex: {
        year: 'date',
      },
    },
  ],
  [
    'proceedings',
    {
      requiredAttributes: [['editor', 'key', 'organization'], 'title', 'year'],
      biblatex: {
        year: 'date',
      },
    },
  ],
  [
    'techreport',
    {
      requiredAttributes: ['author', 'title', 'institution', 'year'],
      biblatex: {
        year: 'date',
      },
    },
  ],
  [
    'unpublished',
    {
      requiredAttributes: ['author', 'title', 'note'],
    },
  ],
])
|
||||
|
||||
const checkRequiredFields = (
|
||||
nodeRef: SyntaxNodeRef,
|
||||
state: EditorState
|
||||
): Diagnostic[] => {
|
||||
// We just return no errors if we don't find the info we're looking for in the
|
||||
// syntax tree
|
||||
const node = nodeRef.node
|
||||
|
||||
const entryNameNode = node.getChild(EntryName)
|
||||
if (!entryNameNode) {
|
||||
return []
|
||||
}
|
||||
|
||||
const entryTypeNameNode = entryNameNode.getChild(EntryTypeName)
|
||||
if (!entryTypeNameNode) {
|
||||
return []
|
||||
}
|
||||
const entryTypeName = state
|
||||
.sliceDoc(entryTypeNameNode.from, entryTypeNameNode.to)
|
||||
.toLowerCase()
|
||||
const environment = bibEntryValidationRules.get(entryTypeName)
|
||||
if (!environment) {
|
||||
return []
|
||||
}
|
||||
const requiredFields = environment.requiredAttributes
|
||||
|
||||
const actualFieldNodes = node.getChildren('Field')
|
||||
const actualFieldNames = new Set(
|
||||
actualFieldNodes
|
||||
.map(fieldNode => fieldNode.getChild(FieldName))
|
||||
.map(fieldNode =>
|
||||
fieldNode ? state.sliceDoc(fieldNode.from, fieldNode.to) : undefined
|
||||
)
|
||||
.filter(Boolean)
|
||||
.map(name => name?.toLowerCase())
|
||||
)
|
||||
|
||||
if (actualFieldNames.has('crossref')) {
|
||||
// We don't want to deal with crossrefs (key inheritance from other entries)
|
||||
return []
|
||||
}
|
||||
|
||||
const entryHasField = (fieldName: string): boolean => {
|
||||
if (actualFieldNames.has(fieldName)) {
|
||||
return true
|
||||
}
|
||||
if (environment.biblatex && environment.biblatex[fieldName]) {
|
||||
return actualFieldNames.has(environment.biblatex[fieldName])
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
const missingFields = requiredFields.filter(field => {
|
||||
if (Array.isArray(field)) {
|
||||
return !field.some(f => entryHasField(f))
|
||||
} else {
|
||||
return !entryHasField(field)
|
||||
}
|
||||
})
|
||||
|
||||
if (missingFields.length === 0) {
|
||||
// All is good
|
||||
return []
|
||||
}
|
||||
|
||||
return [
|
||||
{
|
||||
from: entryNameNode.from,
|
||||
to: entryNameNode.to,
|
||||
message: createErrorMessage(missingFields, entryTypeName, state),
|
||||
severity: 'warning',
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
function createErrorMessage(
|
||||
missingFields: (string[] | string)[],
|
||||
entryTypeName: string,
|
||||
state: EditorState
|
||||
) {
|
||||
const translation =
|
||||
missingFields.length === 1
|
||||
? state.phrase('missing_field_for_entry')
|
||||
: state.phrase('missing_fields_for_entry')
|
||||
const or = state.phrase('or')
|
||||
const errorLines = missingFields
|
||||
.map(fieldOptions => {
|
||||
const options = Array.isArray(fieldOptions)
|
||||
? fieldOptions
|
||||
: [fieldOptions]
|
||||
return createOrList(options, or)
|
||||
})
|
||||
.map(field => ` • ${field}`)
|
||||
.join('\n')
|
||||
return `${translation} ${entryTypeName}:\n${errorLines}`
|
||||
}
|
||||
|
||||
function createOrList(fields: string[], orPhrase: string) {
|
||||
if (fields.length === 0) {
|
||||
return ''
|
||||
}
|
||||
if (fields.length === 1) {
|
||||
return fields[0]
|
||||
}
|
||||
return (
|
||||
fields.slice(0, -1).join(', ') + ` ${orPhrase} ` + fields[fields.length - 1]
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,68 @@
|
||||
import { LanguageDescription } from '@codemirror/language'
|
||||
|
||||
// Languages available in the editor: the file extensions that map to each
// language, plus a lazy loader for the corresponding LanguageSupport bundle.
export const languages = [
  LanguageDescription.of({
    name: 'latex',
    extensions: [
      'tex',
      'sty',
      'cls',
      'clo',
      'bst',
      'bbl',
      'pdf_tex',
      'pdf_t',
      'map',
      'fd',
      'enc',
      'def',
      'mf',
      'pgf',
      'tikz',
      'bbx',
      'cbx',
      'dbx',
      'lbx',
      'lco',
      'ldf',
      'xmpdata',
      'Rnw',
      'rnw',
      'lyx',
      'inc',
      'dtx',
      'hak',
      'eps_tex',
      'brf',
      'ins',
      'hva',
      'Rtex',
      'rtex',
      'pstex',
      'pstex_t',
      'gin',
      'fontspec',
      'pygstyle',
      'pygtex',
      'ps_tex',
      'ltx',
    ],
    load: () => {
      return import('./latex').then(m => m.latex())
    },
  }),
  LanguageDescription.of({
    name: 'bibtex',
    extensions: ['bib'],
    load: () => {
      return import('./bibtex').then(m => m.bibtex())
    },
  }),
  LanguageDescription.of({
    name: 'markdown',
    extensions: ['md', 'markdown'],
    load: () => {
      return import('./markdown').then(m => m.markdown())
    },
  }),
]
|
||||
@@ -0,0 +1,129 @@
|
||||
import {
|
||||
CharCategory,
|
||||
EditorState,
|
||||
SelectionRange,
|
||||
Text,
|
||||
} from '@codemirror/state'
|
||||
import {
|
||||
CloseBracketConfig,
|
||||
nextChar,
|
||||
prevChar,
|
||||
} from '@codemirror/autocomplete'
|
||||
|
||||
export const closeBracketConfig: CloseBracketConfig = {
|
||||
brackets: ['$', '$$', '[', '{', '('],
|
||||
buildInsert(
|
||||
state: EditorState,
|
||||
range: SelectionRange,
|
||||
open: string,
|
||||
close: string
|
||||
): string {
|
||||
switch (open) {
|
||||
// close for $ or $$
|
||||
case '$': {
|
||||
const prev = prevChar(state.doc, range.head)
|
||||
if (prev === '\\') {
|
||||
const preprev = prevChar(state.doc, range.head - prev.length)
|
||||
// add an unprefixed closing dollar to \\$
|
||||
if (preprev === '\\') {
|
||||
return open + '$'
|
||||
}
|
||||
// don't auto-close \$
|
||||
return open
|
||||
}
|
||||
|
||||
const next = nextChar(state.doc, range.head)
|
||||
if (next === '\\') {
|
||||
// avoid auto-closing $ before a TeX command
|
||||
const pos = range.head + prev.length
|
||||
const postnext = nextChar(state.doc, pos)
|
||||
|
||||
if (state.charCategorizer(pos)(postnext) !== CharCategory.Word) {
|
||||
return open + '$'
|
||||
}
|
||||
|
||||
// don't auto-close $\command
|
||||
return open
|
||||
}
|
||||
|
||||
// avoid creating an odd number of dollar signs
|
||||
const count = countSurroundingCharacters(state.doc, range.from, open)
|
||||
if (count % 2 !== 0) {
|
||||
return open
|
||||
}
|
||||
return open + close
|
||||
}
|
||||
|
||||
// close for [ or \[
|
||||
case '[': {
|
||||
const prev = prevChar(state.doc, range.head)
|
||||
if (prev === '\\') {
|
||||
const preprev = prevChar(state.doc, range.head - prev.length)
|
||||
// add an unprefixed closing bracket to \\[
|
||||
if (preprev === '\\') {
|
||||
return open + ']'
|
||||
}
|
||||
return open + '\\' + close
|
||||
}
|
||||
return open + close
|
||||
}
|
||||
|
||||
// only close for \(
|
||||
case '(': {
|
||||
const prev = prevChar(state.doc, range.head)
|
||||
if (prev === '\\') {
|
||||
const preprev = prevChar(state.doc, range.head - prev.length)
|
||||
// don't auto-close \\(
|
||||
if (preprev === '\\') {
|
||||
return open
|
||||
}
|
||||
return open + '\\' + close
|
||||
}
|
||||
return open
|
||||
}
|
||||
|
||||
// only close for {
|
||||
case '{': {
|
||||
const prev = prevChar(state.doc, range.head)
|
||||
if (prev === '\\') {
|
||||
const preprev = prevChar(state.doc, range.head - prev.length)
|
||||
// add an unprefixed closing bracket to \\{
|
||||
if (preprev === '\\') {
|
||||
return open + '}'
|
||||
}
|
||||
// don't auto-close \{
|
||||
return open
|
||||
}
|
||||
return open + close
|
||||
}
|
||||
|
||||
default:
|
||||
return open + close
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
function countSurroundingCharacters(doc: Text, pos: number, insert: string) {
|
||||
let count = 0
|
||||
// count backwards
|
||||
let to = pos
|
||||
do {
|
||||
const char = doc.sliceString(to - insert.length, to)
|
||||
if (char !== insert) {
|
||||
break
|
||||
}
|
||||
count++
|
||||
to--
|
||||
} while (to > 1)
|
||||
// count forwards
|
||||
let from = pos
|
||||
do {
|
||||
const char = doc.sliceString(from, from + insert.length)
|
||||
if (char !== insert) {
|
||||
break
|
||||
}
|
||||
count++
|
||||
from++
|
||||
} while (from < doc.length)
|
||||
return count
|
||||
}
|
||||
@@ -0,0 +1,86 @@
|
||||
import { toggleVisualEffect } from '../../extensions/visual/visual'
|
||||
import customLocalStorage from '../../../../infrastructure/local-storage'
|
||||
import { Compartment } from '@codemirror/state'
|
||||
import { EditorView, ViewPlugin } from '@codemirror/view'
|
||||
import { treeView } from '@overleaf/codemirror-tree-view'
|
||||
|
||||
// to enable: window.localStorage.setItem('cm6-dev-tools', '"on"')
// to disable: window.localStorage.removeItem('cm6-dev-tools')
const enabled = customLocalStorage.getItem('cm6-dev-tools') === 'on'

// compartment so the tree-view extension can be toggled on/off at runtime
const devToolsConf = new Compartment()
|
||||
|
||||
/**
|
||||
* A panel which displays an outline of the current document's syntax tree alongside the document,
|
||||
* to assist with CodeMirror extension development.
|
||||
*/
|
||||
export const codemirrorDevTools = () => {
|
||||
return enabled ? [devToolsButton, devToolsConf.of(createExtension())] : []
|
||||
}
|
||||
|
||||
// View plugin that injects a toolbar button (🦧) for toggling the dev tools
// panel. The toolbar is rendered outside this plugin, so the button is
// (re-)attached asynchronously via setTimeout.
const devToolsButton = ViewPlugin.define(view => {
  const getContainer = () => document.querySelector('.ol-cm-toolbar-end')

  // remove any previously-added button (avoids duplicates on re-attach)
  const removeButton = () => {
    getContainer()?.querySelector('#cm6-dev-tools-button')?.remove()
  }

  const addButton = () => {
    const button = document.createElement('button')
    button.classList.add('btn', 'formatting-btn', 'formatting-btn-icon')
    button.id = 'cm6-dev-tools-button'
    button.textContent = '🦧'
    button.style.border = 'none'
    button.style.outline = 'none'
    button.addEventListener('click', event => {
      event.preventDefault()
      view.dispatch(toggleDevTools())
    })

    getContainer()?.append(button)
  }

  // defer until after the toolbar has rendered
  window.setTimeout(() => {
    removeButton()
    addButton()
  })

  return {
    update(update) {
      for (const tr of update.transactions) {
        for (const effect of tr.effects) {
          if (effect.is(toggleVisualEffect)) {
            // the toolbar is re-rendered when switching between source and
            // visual modes, so the button needs to be re-attached
            window.setTimeout(() => {
              removeButton()
              addButton()
            })
          }
        }
      }
    },
    destroy() {
      removeButton()
    },
  }
})
|
||||
|
||||
// whether the dev tools panel is currently shown (persisted across reloads)
const isActive = () =>
  customLocalStorage.getItem('cm6-dev-tools-active') === 'on'

// Flip the persisted active state and reconfigure the compartment to match;
// returns a transaction spec for view.dispatch
const toggleDevTools = () => {
  customLocalStorage.setItem('cm6-dev-tools-active', isActive() ? 'off' : 'on')

  return {
    effects: devToolsConf.reconfigure(createExtension()),
  }
}

const treeViewTheme = EditorView.baseTheme({
  // note: duplicate selector to ensure extension theme styles are overriden
  '.cm-tree-view-container.cm-tree-view-container': {
    top: '32px',
    minHeight: 'unset',
  },
})

// the tree view (plus theme tweaks) when active, otherwise no extension
const createExtension = () => (isActive() ? [treeView, treeViewTheme] : [])
|
||||
@@ -0,0 +1,471 @@
|
||||
import {
|
||||
CompletionContext,
|
||||
CompletionResult,
|
||||
CompletionSource,
|
||||
ifIn,
|
||||
} from '@codemirror/autocomplete'
|
||||
import { customEndCompletions } from './completions/environments'
|
||||
import { customCommandCompletions } from './completions/doc-commands'
|
||||
import {
|
||||
customEnvironmentCompletions,
|
||||
findEnvironmentsInDoc,
|
||||
} from './completions/doc-environments'
|
||||
import { Completions } from './completions/types'
|
||||
import { buildReferenceCompletions } from './completions/references'
|
||||
import { buildPackageCompletions } from './completions/packages'
|
||||
import { buildLabelCompletions } from './completions/labels'
|
||||
import { buildIncludeCompletions } from './completions/include'
|
||||
import { buildBibliographyStyleCompletions } from './completions/bibliography-styles'
|
||||
import { buildClassCompletions } from './completions/classes'
|
||||
import { buildAllCompletions } from './completions'
|
||||
import {
|
||||
ifInType,
|
||||
cursorIsAtBeginEnvironment,
|
||||
cursorIsAtEndEnvironment,
|
||||
} from '../../utils/tree-query'
|
||||
import {
|
||||
applySnippet,
|
||||
extendOverUnpairedClosingBrace,
|
||||
} from './completions/apply'
|
||||
import { snippet } from './completions/data/environments'
|
||||
import { syntaxTree } from '@codemirror/language'
|
||||
|
||||
// A fresh Completions object with every category empty, ready to be filled
// in by the build*Completions helpers.
function blankCompletions(): Completions {
  return {
    bibliographies: [],
    bibliographyStyles: [],
    classes: [],
    commands: [],
    graphics: [],
    includes: [],
    labels: [],
    packages: [],
    references: [],
  }
}
|
||||
|
||||
/**
 * Match the text before the cursor against the shape of a LaTeX command,
 * returning the match (if the cursor is inside a command's brace argument)
 * and the raw matched range. Returns null when no completion should be
 * offered at all.
 */
export function getCompletionMatches(context: CompletionContext) {
  // NOTE: [^\\] is needed to match commands inside the parameters of other commands
  const matchBefore = context.explicit
    ? context.matchBefore(/(?:^|\\)[^\\]*(\[[^\]]*])?[^\\]*/) // don't require a backslash if opening on explicit "startCompletion" keypress
    : context.matchBefore(/\\?\\[^\\]*(\[[^\]]*])?[^\\]*/)

  if (!matchBefore) {
    return null
  }

  // ignore some matches when not opening on explicit "startCompletion" keypress
  if (!context.explicit) {
    // ignore matches that end with two backslashes. \\ shouldn't show the autocomplete as it's used for line break.
    if (/\\\\$/.test(matchBefore.text)) {
      return null
    }

    // ignore matches that end with whitespace, unless after a comma
    // e.g. \item with a trailing space shouldn't show the autocomplete.
    if (/[^,\s]\s+$/.test(matchBefore.text)) {
      return null
    }
  }

  // Shape of "\command[opt]{…}{arg1, arg2, prefix" — named groups capture the
  // opener (before), command name, any existing comma-separated keys, and the
  // partial key being typed (prefix).
  const multipleArgumentMatcher =
    /^(?<before>\\(?<command>\w+)\*?(?<arguments>(\[[^\]]*?]|\{[^}]*?})+)?{)(?<existing>([^}]+\s*,\s*)+)?(?<prefix>[^}]+)?$/
  // If this is a command with multiple comma-separated arguments, show deduplicated available completions
  const match = matchBefore.text.match(multipleArgumentMatcher)

  return { match, matchBefore }
}
|
||||
|
||||
export function getCompletionDetails(
|
||||
match: RegExpMatchArray,
|
||||
matchBefore: {
|
||||
from: number
|
||||
to: number
|
||||
text: string
|
||||
}
|
||||
) {
|
||||
let { before, command, existing } = match.groups as {
|
||||
before?: string
|
||||
command: string
|
||||
existing?: string
|
||||
}
|
||||
|
||||
command = command.toLowerCase()
|
||||
|
||||
const existingKeys = existing ? splitExistingKeys(existing) : []
|
||||
|
||||
const from =
|
||||
matchBefore.from + (before?.length || 0) + (existing?.length || 0)
|
||||
const validFor = /[^}\s]*/
|
||||
|
||||
return { command, existingKeys, from, validFor }
|
||||
}
|
||||
|
||||
// Everything a completion-result builder callback receives: the completion
// context, the (initially blank) completion buckets, the regex match details,
// and the derived positions/patterns from getCompletionDetails.
export type CompletionBuilderOptions = {
  context: CompletionContext
  completions: Completions
  match: RegExpMatchArray
  matchBefore: { from: number; to: number; text: string }
  existingKeys: string[]
  from: number
  validFor: RegExp
  before: string
}
|
||||
|
||||
/**
 * Build a CompletionSource for a single command argument: matches the command
 * shape before the cursor, derives positions/keys, and delegates to `builder`
 * to produce the result. Only active inside the node types in `ifInSpec`.
 */
export const makeArgumentCompletionSource = (
  ifInSpec: string[],
  builder: (
    builderOptions: CompletionBuilderOptions
  ) => CompletionResult | null | Promise<CompletionResult | null>
): CompletionSource => {
  const completionSource: CompletionSource = (context: CompletionContext) => {
    const completionMatches = getCompletionMatches(context)

    if (!completionMatches) {
      return null
    }

    const completions: Completions = blankCompletions()

    const { match, matchBefore } = completionMatches

    if (!match) {
      // not inside a command's brace argument
      return null
    }

    const { before } = match.groups as {
      before: string
    }

    const { existingKeys, from, validFor } = getCompletionDetails(
      match,
      matchBefore
    )

    return builder({
      completions,
      context,
      match,
      matchBefore,
      before,
      existingKeys,
      from,
      validFor,
    })
  }
  return ifIn(ifInSpec, completionSource)
}
|
||||
|
||||
const splitExistingKeys = (text: string) =>
|
||||
text
|
||||
.split(',')
|
||||
.map(key => key.trim())
|
||||
.filter(Boolean)
|
||||
|
||||
/**
 * Build a CompletionSource for commands whose single brace argument holds a
 * comma-separated list of keys (e.g. \cite{a,b,…}). Uses the syntax-tree
 * token rather than a regex to find the argument, then delegates to
 * `builder`.
 */
export const makeMultipleArgumentCompletionSource = (
  ifInSpec: string[],
  builder: (
    builderOptions: Pick<
      CompletionBuilderOptions,
      'completions' | 'context' | 'existingKeys' | 'from' | 'validFor'
    >
  ) => ReturnType<CompletionSource>
): CompletionSource => {
  const completionSource: CompletionSource = (context: CompletionContext) => {
    const token = context.tokenBefore(ifInSpec)

    if (!token) {
      return null
    }

    // match multiple comma-separated arguments, up to the last separator
    const existing = token.text.match(/^\{(.+\s*,\s*)?.*$/)?.[1] ?? ''

    return builder({
      completions: blankCompletions(),
      context,
      existingKeys: splitExistingKeys(existing),
      // +1 skips the opening brace at the start of the token
      from: token.from + 1 + existing.length,
      validFor: /[^}\s]*/,
    })
  }
  return ifIn(ifInSpec, completionSource)
}
|
||||
|
||||
// Citation keys inside \cite{…} (and friends); already-typed keys are excluded
export const bibKeyArgumentCompletionSource: CompletionSource =
  makeMultipleArgumentCompletionSource(
    ['BibKeyArgument'],
    ({ completions, context, from, validFor, existingKeys }) => {
      buildReferenceCompletions(completions, context)

      return {
        from,
        validFor,
        options: completions.references.filter(
          item => !existingKeys.includes(item.label)
        ),
      }
    }
  )

// Label names inside \ref{…}; already-typed labels are excluded
export const refArgumentCompletionSource: CompletionSource =
  makeMultipleArgumentCompletionSource(
    ['RefArgument'],
    ({ completions, context, from, validFor, existingKeys }) => {
      buildLabelCompletions(completions, context)

      return {
        from,
        validFor,
        options: completions.labels.filter(
          item => !existingKeys.includes(item.label)
        ),
      }
    }
  )

// Package names inside \usepackage{…}; already-typed packages are excluded
export const packageArgumentCompletionSource: CompletionSource =
  makeMultipleArgumentCompletionSource(
    ['PackageArgument'],
    ({ completions, context, from, validFor, existingKeys }) => {
      buildPackageCompletions(completions, context)

      return {
        from,
        validFor,
        options: completions.packages.filter(
          item => !existingKeys.includes(item.label)
        ),
      }
    }
  )

// File paths inside \input{…}
export const inputArgumentCompletionSource: CompletionSource =
  makeArgumentCompletionSource(
    ['InputArgument'],
    ({ completions, context, from }) => {
      buildIncludeCompletions(completions, context)

      return {
        from,
        validFor: /^[^}]*/,
        options: completions.includes,
      }
    }
  )

// File paths inside \include{…}
export const includeArgumentCompletionSource: CompletionSource =
  makeArgumentCompletionSource(
    ['IncludeArgument'],
    ({ completions, context, from }) => {
      buildIncludeCompletions(completions, context)

      return {
        from,
        validFor: /^[^}]*/,
        options: completions.includes,
      }
    }
  )

// Image paths inside \includegraphics{…}
export const includeGraphicsArgumentCompletionSource: CompletionSource =
  makeArgumentCompletionSource(
    ['IncludeGraphicsArgument'],
    ({ completions, context, from }) => {
      buildIncludeCompletions(completions, context)

      return {
        from,
        validFor: /^[^}]*/,
        options: completions.graphics,
      }
    }
  )

// Environment names inside \begin{…} / \end{…}
export const environmentNameCompletionSource: CompletionSource =
  makeArgumentCompletionSource(
    ['EnvNameGroup'],
    ({ completions, context, matchBefore, before }) => {
      if (cursorIsAtBeginEnvironment(context.state, context.pos)) {
        buildAllCompletions(completions, context)

        return {
          from: matchBefore.from,
          validFor: /^\\begin{\S*/,
          options: [
            ...completions.commands,
            ...customEnvironmentCompletions(context),
          ],
        }
      } else if (cursorIsAtEndEnvironment(context.state, context.pos)) {
        // inside \end{…}: offer the names of currently-open environments
        return {
          from: matchBefore.from + before.length,
          validFor: /^[^}]*/,
          options: customEndCompletions(context),
        }
      } else {
        return null
      }
    }
  )

// Class names inside \documentclass{…}
export const documentClassArgumentCompletionSource: CompletionSource =
  makeArgumentCompletionSource(
    ['DocumentClassArgument'],
    ({ completions, from }) => {
      buildClassCompletions(completions)

      return {
        from,
        validFor: /^[^}]*/,
        options: completions.classes,
      }
    }
  )

// Bibliography file paths inside \bibliography{…}
export const bibliographyArgumentCompletionSource: CompletionSource =
  makeArgumentCompletionSource(
    ['BibliographyArgument'],
    ({ completions, context, from }) => {
      buildIncludeCompletions(completions, context)

      return {
        from,
        validFor: /^[^}]*/,
        options: completions.bibliographies,
      }
    }
  )

// Style names inside \bibliographystyle{…}
export const bibliographyStyleArgumentCompletionSource: CompletionSource =
  makeArgumentCompletionSource(
    ['BibliographyStyleArgument'],
    ({ completions, from }) => {
      buildBibliographyStyleCompletions(completions)

      return {
        from,
        validFor: /^[^}]*/,
        options: completions.bibliographyStyles,
      }
    }
  )

// All argument-position completion sources, registered together
export const argumentCompletionSources: CompletionSource[] = [
  bibKeyArgumentCompletionSource,
  refArgumentCompletionSource,
  packageArgumentCompletionSource,
  inputArgumentCompletionSource,
  includeArgumentCompletionSource,
  includeGraphicsArgumentCompletionSource,
  environmentNameCompletionSource,
  documentClassArgumentCompletionSource,
  bibliographyArgumentCompletionSource,
  bibliographyStyleArgumentCompletionSource,
]
|
||||
|
||||
// Completions for LaTeX command names themselves (as opposed to their
// arguments, which are handled by argumentCompletionSources).
const commandCompletionSource = (context: CompletionContext) => {
  const completionMatches = getCompletionMatches(context)

  if (!completionMatches) {
    return null
  }

  const { match, matchBefore } = completionMatches
  if (match) {
    // We're already in a command argument, bail out
    return null
  }

  const completions: Completions = blankCompletions()

  buildAllCompletions(completions, context)

  // Unknown commands
  const prefixMatcher = /^\\[^{\s]*$/
  const prefixMatch = matchBefore.text.match(prefixMatcher)
  if (prefixMatch) {
    // typing a bare "\prefix": offer commands plus environments/commands
    // discovered in the document
    return {
      from: matchBefore.from,
      validFor: prefixMatcher,
      options: [
        ...completions.commands,
        ...customCommandCompletions(context, completions.commands),
        ...customEnvironmentCompletions(context),
      ],
    }
  }

  // anything else (no validFor)
  return {
    from: matchBefore.to,
    options: [
      ...completions.commands,
      ...customCommandCompletions(context, completions.commands),
    ],
  }
}
|
||||
|
||||
export const inCommandCompletionSource: CompletionSource = ifInType(
|
||||
'$CtrlSeq',
|
||||
context => {
|
||||
return context.explicit ? null : commandCompletionSource(context)
|
||||
}
|
||||
)
|
||||
|
||||
export const explicitCommandCompletionSource: CompletionSource = context => {
|
||||
return context.explicit ? commandCompletionSource(context) : null
|
||||
}
|
||||
|
||||
/**
 * An additional completion source that handles two situations:
 *
 * 1. Typing the environment name within an already-complete `\begin{…}` command.
 * 2. After typing the closing brace of a complete `\begin{foo}` command, where the environment
 *    isn't previously known, leaving the cursor after the closing brace.
 */
export const beginEnvironmentCompletionSource: CompletionSource = context => {
  const beginEnvToken = context.tokenBefore(['BeginEnv'])
  if (!beginEnvToken) {
    return null
  }

  // resolve the BeginEnv node enclosing the token
  const beginEnv = syntaxTree(context.state).resolveInner(
    beginEnvToken.from,
    1
  ).parent
  if (!beginEnv?.type.is('BeginEnv')) {
    return null
  }

  const envNameGroup = beginEnv.getChild('EnvNameGroup')
  if (!envNameGroup) {
    return null
  }

  const envName = envNameGroup.getChild('$EnvName')
  if (!envName) {
    return null
  }

  const name = context.state.sliceDoc(envName.from, envName.to)

  // if not directly after `\begin{…}`, exclude known environments
  if (context.pos !== envNameGroup.to) {
    const existingEnvironmentNames = findEnvironmentsInDoc(context)
    if (existingEnvironmentNames.has(name)) {
      return null
    }
  }

  // offer to expand the typed name into a full begin/end pair, ranked below
  // the regular completions
  const completion = {
    label: `\\begin{${name}} …`,
    apply: applySnippet(snippet(name)),
    extend: extendOverUnpairedClosingBrace,
    boost: -99,
  }

  return {
    from: beginEnvToken.from,
    options: [completion],
  }
}
|
||||
@@ -0,0 +1,142 @@
|
||||
import { EditorState, Text } from '@codemirror/state'
|
||||
import {
|
||||
clearSnippet,
|
||||
Completion,
|
||||
snippet,
|
||||
nextChar,
|
||||
} from '@codemirror/autocomplete'
|
||||
import { EditorView } from '@codemirror/view'
|
||||
import { prepareSnippetTemplate } from '../snippets'
|
||||
import { ancestorNodeOfType } from '../../../utils/tree-query'
|
||||
|
||||
export const applySnippet = (template: string, clear = false) => {
|
||||
return (
|
||||
view: EditorView,
|
||||
completion: Completion,
|
||||
from: number,
|
||||
to: number
|
||||
) => {
|
||||
snippet(prepareSnippetTemplate(template))(view, completion, from, to)
|
||||
if (clear) {
|
||||
clearSnippet(view)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const longestCommonPrefix = (...strs: string[]) => {
|
||||
if (strs.length === 0) {
|
||||
return 0
|
||||
}
|
||||
const minLength = Math.min(...strs.map(str => str.length))
|
||||
for (let i = 0; i < minLength; ++i) {
|
||||
for (let j = 1; j < strs.length; ++j) {
|
||||
if (strs[j][i] !== strs[0][i]) {
|
||||
return i
|
||||
}
|
||||
}
|
||||
}
|
||||
return minLength
|
||||
}
|
||||
|
||||
// Adjust a completion change so a required `{…}` parameter ends up with a
// single, correctly-paired closing brace, extending forwards over existing
// text where appropriate.
export const extendRequiredParameter = (
  state: EditorState,
  change: {
    from: number
    to: number
    insert: string | Text
  }
) => {
  // normalise the inserted content to a plain string for the logic below
  if (typeof change.insert !== 'string') {
    change.insert = change.insert.toString()
  }

  // the innermost argument node containing the change start, if any
  const argumentNode = ancestorNodeOfType(state, change.from, '$Argument')
  // "well-formed" = the argument already has both of its braces
  const isWellFormedArgumentNode =
    argumentNode &&
    argumentNode.getChild('OpenBrace') &&
    argumentNode.getChild('CloseBrace')

  if (nextChar(state.doc, change.to) === '}') {
    // include an existing closing brace, so the cursor moves after it
    change.insert += '}'
    change.to++
  } else {
    // add a closing brace if needed
    if (countUnclosedBraces(state.doc, change.from, change.to) > 0) {
      change.insert += '}'
    }

    if (isWellFormedArgumentNode) {
      // extend over subsequent text that isn't a brace, space, or comma,
      // staying within the current line and the argument node
      const match = state.doc
        .sliceString(
          change.to,
          Math.min(state.doc.lineAt(change.from).to, argumentNode.to)
        )
        .match(/^[^}\s,]+/)
      if (match) {
        change.to += match[0].length
      }
    } else {
      // Ensure we don't swallow a closing brace
      const restOfLine = state.doc
        .sliceString(
          change.to,
          Math.min(
            state.doc.lineAt(change.from).to,
            change.from + change.insert.length
          )
        )
        .split('}')[0]

      // only extend over existing text that the insertion would re-create
      change.to += longestCommonPrefix(
        change.insert.slice(change.to - change.from),
        restOfLine
      )
    }
  }
  // convert back to the Text instance expected by the change spec
  change.insert = state.toText(change.insert)
  return change
}
|
||||
|
||||
// extend forwards to cover an unpaired closing brace:
// when the character just after the change is `}` and the surrounding line
// has surplus closing braces (negative count from countUnclosedBraces), grow
// the range so the completion replaces that surplus brace too
export const extendOverUnpairedClosingBrace = (
  state: EditorState,
  change: {
    from: number
    to: number
  }
) => {
  if (nextChar(state.doc, change.to) === '}') {
    const unclosedBraces = countUnclosedBraces(
      state.doc,
      change.from,
      change.to
    )
    // negative = more closing than opening braces around the change
    if (unclosedBraces < 0) {
      change.to++
    }
  }
  return change
}
|
||||
|
||||
const countUnclosedBraces = (doc: Text, from: number, to: number): number => {
|
||||
const line = doc.lineAt(from)
|
||||
|
||||
const textBefore = doc.sliceString(line.from, from)
|
||||
const textAfter = doc.sliceString(to, line.to)
|
||||
|
||||
const textAfterMatch = textAfter.match(/^[^\\]*/)
|
||||
|
||||
const openBraces =
|
||||
(textBefore.match(/\{/g) || []).length -
|
||||
(textBefore.match(/}/g) || []).length
|
||||
|
||||
const closedBraces = textAfterMatch
|
||||
? (textAfterMatch[0].match(/}/g) || []).length -
|
||||
(textAfterMatch[0].match(/\{/g) || []).length
|
||||
: 0
|
||||
|
||||
return openBraces - closedBraces
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
import { Completions } from './types'
|
||||
import { extendRequiredParameter } from './apply'
|
||||
import { bibliographyStyles } from './data/bibliography-styles'
|
||||
|
||||
// All style names from every known bibliography package, flattened
const values = Object.values(bibliographyStyles).flat()

/**
 * Populate `completions.bibliographyStyles` with one completion per known
 * bibliography style name, for use inside \bibliographystyle{…}.
 */
export function buildBibliographyStyleCompletions(completions: Completions) {
  // TODO: find bibliography package from context and use only relevant styles

  for (const item of values) {
    completions.bibliographyStyles.push({
      type: 'bib',
      label: item,
      extend: extendRequiredParameter,
    })
  }
}
|
||||
@@ -0,0 +1,13 @@
|
||||
import { extendRequiredParameter } from './apply'
|
||||
import { classNames } from './data/class-names'
|
||||
import { Completions } from './types'
|
||||
|
||||
export function buildClassCompletions(completions: Completions) {
|
||||
for (const item of classNames) {
|
||||
completions.classes.push({
|
||||
type: 'cls',
|
||||
label: item,
|
||||
extend: extendRequiredParameter,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
// Known bibliography style names, grouped by the package that provides them.
export const bibliographyStyles: Record<string, string[]> = {
  // https://www.overleaf.com/learn/latex/Bibtex_bibliography_styles
  bibtex: [
    'abbrv',
    'acm',
    'alpha',
    'apalike',
    'ieeetr',
    'plain',
    'siam',
    'unsrt',
  ],
  // https://www.overleaf.com/learn/latex/Natbib_bibliography_styles
  natbib: ['dinat', 'plainnat', 'abbrvnat', 'unsrtnat', 'rusnat', 'ksfh_nat'],
  // https://www.overleaf.com/learn/latex/Biblatex_bibliography_styles
  biblatex: [
    'numeric',
    'alphabetic',
    'authoryear',
    'authortitle',
    'verbose',
    'reading',
    'draft',
    'authoryear-icomp',
    'apa',
    'bwl-FU',
    'chem-acs',
    'chem-angew',
    'chem-biochem',
    'chem-rsc',
    'ieee',
    'mla',
    'musuos',
    'nature',
    'nejm',
    'phys',
    'science',
    'geschichtsfrkl',
    'oscola',
  ],
  // https://ctan.org/tex-archive/macros/latex/contrib/biblatex-contrib
  'biblatex-contrib': [
    // TODO
  ],
}
|
||||
@@ -0,0 +1,11 @@
|
||||
// https://www.overleaf.com/learn/latex/Creating_a_document_in_LaTeX#Reference_guide

// Document class names offered for \documentclass{…} completion.
// TODO: more class names
export const classNames = [
  'article',
  'report',
  'book',
  'letter',
  // 'slides',
  'beamer',
]
|
||||
@@ -0,0 +1,83 @@
|
||||
/**
 * Standard environment snippet: \begin{name}, a tab-indented placeholder
 * body ($1), then \end{name}.
 */
export const snippet = (name: string) =>
  [`\\begin{${name}}`, '\t$1', `\\end{${name}}`].join('\n')
|
||||
|
||||
/**
 * Environment snippet without indentation of the placeholder body, used for
 * environments whose content conventionally starts at column 0.
 */
export const snippetNoIndent = (name: string) =>
  [`\\begin{${name}}`, '$1', `\\end{${name}}`].join('\n')
|
||||
|
||||
// Snippet templates for bundled environments, keyed by environment name.
// Values use CodeMirror snippet placeholders ($1, ${2:default}); the `\$`
// escapes keep that placeholder syntax literal inside the template strings.
export const environments = new Map([
  ['abstract', snippet('abstract')],
  ['align', snippet('align')],
  ['align*', snippet('align*')],
  [
    'array',
    `\\begin{array}{\${1:cc}}
\t$2 & $3 \\\\
\t$4 & $5
\\end{array}`,
  ],
  ['center', snippet('center')],
  [
    'description',
    `\\begin{description}
\t\\item[$1] $2
\\end{description}`,
  ],
  ['document', snippetNoIndent('document')],
  ['equation', snippet('equation')],
  ['equation*', snippet('equation*')],
  [
    'enumerate',
    `\\begin{enumerate}
\t\\item $1
\\end{enumerate}`,
  ],
  [
    'figure',
    `\\begin{figure}
\t\\centering
\t\\includegraphics[width=0.5\\linewidth]{$1}
\t\\caption{\${2:Caption}}
\t\\label{\${3:fig:enter-label}}
\\end{figure}`,
  ],
  [
    'frame',
    `\\begin{frame}{\${1:Frame Title}}
\t$2
\\end{frame}`,
  ],
  ['gather', snippet('gather')],
  ['gather*', snippet('gather*')],
  [
    'itemize',
    `\\begin{itemize}
\t\\item $1
\\end{itemize}`,
  ],
  ['multline', snippet('multline')],
  ['multline*', snippet('multline*')],
  ['quote', snippet('quote')],
  ['split', snippet('split')],
  [
    'table',
    `\\begin{table}[$1]
\t\\centering
\t\\begin{tabular}{\${2:c|c}}
\t\t$3 & $4 \\\\
\t\t$5 & $6
\t\\end{tabular}
\t\\caption{\${7:Caption}}
\t\\label{\${8:tab:my_label}}
\\end{table}`,
  ],
  [
    'tabular',
    `\\begin{tabular}{\${1:c|c}}
\t$2 & $3 \\\\
\t$4 & $5
\\end{tabular}`,
  ],
  ['verbatim', snippet('verbatim')],
])
|
||||
@@ -0,0 +1,101 @@
|
||||
// Package names offered for \usepackage{…} completion.
// NOTE(review): the ordering looks frequency-based rather than alphabetical —
// confirm the source of this ordering before resorting.
export const packageNames: string[] = [
  'inputenc',
  'graphicx',
  'amsmath',
  'geometry',
  'amssymb',
  'hyperref',
  'babel',
  'color',
  'xcolor',
  'url',
  'natbib',
  'fontenc',
  'fancyhdr',
  'amsfonts',
  'booktabs',
  'amsthm',
  'float',
  'tikz',
  'caption',
  'setspace',
  'multirow',
  'array',
  'multicol',
  'titlesec',
  'enumitem',
  'ifthen',
  'listings',
  'blindtext',
  'subcaption',
  'times',
  'bm',
  'subfigure',
  'algorithm',
  'fontspec',
  'biblatex',
  'tabularx',
  'microtype',
  'etoolbox',
  'parskip',
  'calc',
  'verbatim',
  'mathtools',
  'epsfig',
  'wrapfig',
  'lipsum',
  'cite',
  'textcomp',
  'longtable',
  'textpos',
  'algpseudocode',
  'enumerate',
  'subfig',
  'pdfpages',
  'epstopdf',
  'latexsym',
  'lmodern',
  'pifont',
  'ragged2e',
  'rotating',
  'dcolumn',
  'xltxtra',
  'marvosym',
  'indentfirst',
  'xspace',
  'csquotes',
  'xparse',
  'changepage',
  'soul',
  'xunicode',
  'comment',
  'mathrsfs',
  'tocbibind',
  'lastpage',
  'algorithm2e',
  'pgfplots',
  'lineno',
  'algorithmic',
  'fullpage',
  'mathptmx',
  'todonotes',
  'ulem',
  'tweaklist',
  'moderncvstyleclassic',
  'collection',
  'moderncvcompatibility',
  'gensymb',
  'helvet',
  'siunitx',
  'adjustbox',
  'placeins',
  'colortbl',
  'appendix',
  'makeidx',
  'supertabular',
  'ifpdf',
  'framed',
  'aliascnt',
  'layaureo',
  'authblk',
]
|
||||
@@ -0,0 +1,7 @@
|
||||
// Completion data for \verb, whose argument is delimited by `|` characters
// rather than braces; `#{}` marks the snippet placeholder position.
export default [
  {
    type: 'cmd',
    label: '\\verb||',
    snippet: '\\verb|#{}|',
  },
]
|
||||
@@ -0,0 +1,692 @@
|
||||
export default [
|
||||
{
|
||||
caption: '\\begin{}',
|
||||
snippet: '\\begin{$1}',
|
||||
meta: 'env',
|
||||
score: 7.849662248028187,
|
||||
},
|
||||
{
|
||||
caption: '\\end{}',
|
||||
snippet: '\\end{$1}',
|
||||
meta: 'env',
|
||||
score: 7.847906405228455,
|
||||
},
|
||||
{
|
||||
caption: '\\usepackage[]{}',
|
||||
snippet: '\\usepackage[$1]{$2}',
|
||||
meta: 'pkg',
|
||||
score: 5.427890758130527,
|
||||
},
|
||||
{
|
||||
caption: '\\item',
|
||||
snippet: '\\item ',
|
||||
meta: 'cmd',
|
||||
score: 3.800886892251021,
|
||||
},
|
||||
{
|
||||
caption: '\\item[]',
|
||||
snippet: '\\item[$1] ',
|
||||
meta: 'cmd',
|
||||
score: 3.800886892251021,
|
||||
},
|
||||
{
|
||||
caption: '\\section{}',
|
||||
snippet: '\\section{$1}',
|
||||
meta: 'cmd',
|
||||
score: 3.0952612541683835,
|
||||
},
|
||||
{
|
||||
caption: '\\textbf{}',
|
||||
snippet: '\\textbf{$1}',
|
||||
meta: 'cmd',
|
||||
score: 2.627755982816738,
|
||||
},
|
||||
{
|
||||
caption: '\\cite{}',
|
||||
snippet: '\\cite{$1}',
|
||||
meta: 'cmd',
|
||||
score: 2.341195220791228,
|
||||
},
|
||||
{
|
||||
caption: '\\label{}',
|
||||
snippet: '\\label{$1}',
|
||||
meta: 'cmd',
|
||||
score: 1.897791904799601,
|
||||
},
|
||||
{
|
||||
caption: '\\textit{}',
|
||||
snippet: '\\textit{$1}',
|
||||
meta: 'cmd',
|
||||
score: 1.6842996195493385,
|
||||
},
|
||||
{
|
||||
caption: '\\includegraphics[]{}',
|
||||
snippet: '\\includegraphics[$1]{$2}',
|
||||
meta: 'cmd',
|
||||
score: 1.4595731795525781,
|
||||
},
|
||||
{
|
||||
caption: '\\documentclass[]{}',
|
||||
snippet: '\\documentclass[$1]{$2}',
|
||||
meta: 'cmd',
|
||||
score: 1.4425339817971206,
|
||||
},
|
||||
{
|
||||
caption: '\\documentclass{}',
|
||||
snippet: '\\documentclass{$1}',
|
||||
meta: 'cmd',
|
||||
score: 1.4425339817971206,
|
||||
},
|
||||
{
|
||||
caption: '\\ref{}',
|
||||
snippet: '\\ref{$1}',
|
||||
meta: 'cross-reference',
|
||||
score: 0.014379554883991673,
|
||||
},
|
||||
{
|
||||
caption: '\\frac{}{}',
|
||||
snippet: '\\frac{$1}{$2}',
|
||||
meta: 'cmd',
|
||||
score: 1.4341091141105058,
|
||||
},
|
||||
{
|
||||
caption: '\\subsection{}',
|
||||
snippet: '\\subsection{$1}',
|
||||
meta: 'cmd',
|
||||
score: 1.3890912739512353,
|
||||
},
|
||||
{
|
||||
caption: '\\hline',
|
||||
snippet: '\\hline',
|
||||
meta: 'cmd',
|
||||
score: 1.3209538327406387,
|
||||
},
|
||||
{
|
||||
caption: '\\caption{}',
|
||||
snippet: '\\caption{$1}',
|
||||
meta: 'cmd',
|
||||
score: 1.2569477427490174,
|
||||
},
|
||||
{
|
||||
caption: '\\centering',
|
||||
snippet: '\\centering',
|
||||
meta: 'cmd',
|
||||
score: 1.1642881814937829,
|
||||
},
|
||||
{
|
||||
caption: '\\vspace{}',
|
||||
snippet: '\\vspace{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.9533807826673939,
|
||||
},
|
||||
{
|
||||
caption: '\\title{}',
|
||||
snippet: '\\title{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.9202908262245683,
|
||||
},
|
||||
{
|
||||
caption: '\\author{}',
|
||||
snippet: '\\author{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.8973590434087177,
|
||||
},
|
||||
{
|
||||
caption: '\\author[]{}',
|
||||
snippet: '\\author[$1]{$2}',
|
||||
meta: 'cmd',
|
||||
score: 0.8973590434087177,
|
||||
},
|
||||
{
|
||||
caption: '\\maketitle',
|
||||
snippet: '\\maketitle',
|
||||
meta: 'cmd',
|
||||
score: 0.7504160124360846,
|
||||
},
|
||||
{
|
||||
caption: '\\textwidth',
|
||||
snippet: '\\textwidth',
|
||||
meta: 'cmd',
|
||||
score: 0.7355328080889112,
|
||||
},
|
||||
{
|
||||
caption: '\\newcommand{}{}',
|
||||
snippet: '\\newcommand{$1}{$2}',
|
||||
meta: 'cmd',
|
||||
score: 0.7264891987129375,
|
||||
},
|
||||
{
|
||||
caption: '\\newcommand{}[]{}',
|
||||
snippet: '\\newcommand{$1}[$2]{$3}',
|
||||
meta: 'cmd',
|
||||
score: 0.7264891987129375,
|
||||
},
|
||||
{
|
||||
caption: '\\date{}',
|
||||
snippet: '\\date{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.7225518453076786,
|
||||
},
|
||||
{
|
||||
caption: '\\emph{}',
|
||||
snippet: '\\emph{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.7060308784832261,
|
||||
},
|
||||
{
|
||||
caption: '\\textsc{}',
|
||||
snippet: '\\textsc{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.6926466355384758,
|
||||
},
|
||||
{
|
||||
caption: '\\multicolumn{}{}{}',
|
||||
snippet: '\\multicolumn{$1}{$2}{$3}',
|
||||
meta: 'cmd',
|
||||
score: 0.5473606021405326,
|
||||
},
|
||||
{
|
||||
caption: '\\input{}',
|
||||
snippet: '\\input{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.4966021927742672,
|
||||
},
|
||||
{
|
||||
caption: '\\alpha',
|
||||
snippet: '\\alpha',
|
||||
meta: 'cmd',
|
||||
score: 0.49520006391384913,
|
||||
},
|
||||
{
|
||||
caption: '\\in',
|
||||
snippet: '\\in',
|
||||
meta: 'cmd',
|
||||
score: 0.4716039670146658,
|
||||
},
|
||||
{
|
||||
caption: '\\mathbf{}',
|
||||
snippet: '\\mathbf{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.4682018419466319,
|
||||
},
|
||||
{
|
||||
caption: '\\right',
|
||||
snippet: '\\right',
|
||||
meta: 'cmd',
|
||||
score: 0.4299239459457309,
|
||||
},
|
||||
{
|
||||
caption: '\\left',
|
||||
snippet: '\\left',
|
||||
meta: 'cmd',
|
||||
score: 0.42937815279867964,
|
||||
},
|
||||
{
|
||||
caption: '\\sum',
|
||||
snippet: '\\sum',
|
||||
meta: 'cmd',
|
||||
score: 0.42607994509619934,
|
||||
},
|
||||
{
|
||||
caption: '\\chapter{}',
|
||||
snippet: '\\chapter{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.422097569591803,
|
||||
},
|
||||
{
|
||||
caption: '\\par',
|
||||
snippet: '\\par',
|
||||
meta: 'cmd',
|
||||
score: 0.413853376001159,
|
||||
},
|
||||
{
|
||||
caption: '\\lambda',
|
||||
snippet: '\\lambda',
|
||||
meta: 'cmd',
|
||||
score: 0.39389600578684125,
|
||||
},
|
||||
{
|
||||
caption: '\\subsubsection{}',
|
||||
snippet: '\\subsubsection{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.3727781330132016,
|
||||
},
|
||||
{
|
||||
caption: '\\bibitem{}',
|
||||
snippet: '\\bibitem{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.3689547570562042,
|
||||
},
|
||||
{
|
||||
caption: '\\bibitem[]{}',
|
||||
snippet: '\\bibitem[$1]{$2}',
|
||||
meta: 'cmd',
|
||||
score: 0.3689547570562042,
|
||||
},
|
||||
{
|
||||
caption: '\\text{}',
|
||||
snippet: '\\text{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.3608680734736821,
|
||||
},
|
||||
{
|
||||
caption: '\\setlength{}{}',
|
||||
snippet: '\\setlength{$1}{$2}',
|
||||
meta: 'cmd',
|
||||
score: 0.354445763583904,
|
||||
},
|
||||
{
|
||||
caption: '\\mathcal{}',
|
||||
snippet: '\\mathcal{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.35084018920966636,
|
||||
},
|
||||
{
|
||||
caption: '\\newpage',
|
||||
snippet: '\\newpage',
|
||||
meta: 'cmd',
|
||||
score: 0.3277033727934986,
|
||||
},
|
||||
{
|
||||
caption: '\\renewcommand{}{}',
|
||||
snippet: '\\renewcommand{$1}{$2}',
|
||||
meta: 'cmd',
|
||||
score: 0.3267437011085663,
|
||||
},
|
||||
{
|
||||
caption: '\\theta',
|
||||
snippet: '\\theta',
|
||||
meta: 'cmd',
|
||||
score: 0.3210417159232142,
|
||||
},
|
||||
{
|
||||
caption: '\\hspace{}',
|
||||
snippet: '\\hspace{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.3147206476372336,
|
||||
},
|
||||
{
|
||||
caption: '\\beta',
|
||||
snippet: '\\beta',
|
||||
meta: 'cmd',
|
||||
score: 0.3061799530337638,
|
||||
},
|
||||
{
|
||||
caption: '\\texttt{}',
|
||||
snippet: '\\texttt{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.3019066753744355,
|
||||
},
|
||||
{
|
||||
caption: '\\times',
|
||||
snippet: '\\times',
|
||||
meta: 'cmd',
|
||||
score: 0.2957960629411553,
|
||||
},
|
||||
{
|
||||
caption: '\\color{}',
|
||||
snippet: '\\color{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.2864294797053033,
|
||||
},
|
||||
{
|
||||
caption: '\\mu',
|
||||
snippet: '\\mu',
|
||||
meta: 'cmd',
|
||||
score: 0.27635652476799255,
|
||||
},
|
||||
{
|
||||
caption: '\\bibliography{}',
|
||||
snippet: '\\bibliography{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.2659628337907604,
|
||||
},
|
||||
{
|
||||
caption: '\\linewidth',
|
||||
snippet: '\\linewidth',
|
||||
meta: 'cmd',
|
||||
score: 0.2639498312518439,
|
||||
},
|
||||
{
|
||||
caption: '\\delta',
|
||||
snippet: '\\delta',
|
||||
meta: 'cmd',
|
||||
score: 0.2620578600722735,
|
||||
},
|
||||
{
|
||||
caption: '\\sigma',
|
||||
snippet: '\\sigma',
|
||||
meta: 'cmd',
|
||||
score: 0.25940147926344487,
|
||||
},
|
||||
{
|
||||
caption: '\\pi',
|
||||
snippet: '\\pi',
|
||||
meta: 'cmd',
|
||||
score: 0.25920934567729714,
|
||||
},
|
||||
{
|
||||
caption: '\\hat{}',
|
||||
snippet: '\\hat{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.25264309033778715,
|
||||
},
|
||||
{
|
||||
caption: '\\bibliographystyle{}',
|
||||
snippet: '\\bibliographystyle{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.25122317941387773,
|
||||
},
|
||||
{
|
||||
caption: '\\small',
|
||||
snippet: '\\small',
|
||||
meta: 'cmd',
|
||||
score: 0.2447632045426295,
|
||||
},
|
||||
{
|
||||
caption: '\\LaTeX',
|
||||
snippet: '\\LaTeX',
|
||||
meta: 'cmd',
|
||||
score: 0.2334089308452787,
|
||||
},
|
||||
{
|
||||
caption: '\\cdot',
|
||||
snippet: '\\cdot',
|
||||
meta: 'cmd',
|
||||
score: 0.23029085545522762,
|
||||
},
|
||||
{
|
||||
caption: '\\footnote{}',
|
||||
snippet: '\\footnote{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.2253056071787701,
|
||||
},
|
||||
{
|
||||
caption: '\\newtheorem{}{}',
|
||||
snippet: '\\newtheorem{$1}{$2}',
|
||||
meta: 'cmd',
|
||||
score: 0.215689795055434,
|
||||
},
|
||||
{
|
||||
caption: '\\Delta',
|
||||
snippet: '\\Delta',
|
||||
meta: 'cmd',
|
||||
score: 0.21386475063892618,
|
||||
},
|
||||
{
|
||||
caption: '\\tau',
|
||||
snippet: '\\tau',
|
||||
meta: 'cmd',
|
||||
score: 0.21236188205859796,
|
||||
},
|
||||
{
|
||||
caption: '\\hfill',
|
||||
snippet: '\\hfill',
|
||||
meta: 'cmd',
|
||||
score: 0.2058248088519886,
|
||||
},
|
||||
{
|
||||
caption: '\\leq',
|
||||
snippet: '\\leq',
|
||||
meta: 'cmd',
|
||||
score: 0.20498894440637172,
|
||||
},
|
||||
{
|
||||
caption: '\\footnotesize',
|
||||
snippet: '\\footnotesize',
|
||||
meta: 'cmd',
|
||||
score: 0.2038592081252624,
|
||||
},
|
||||
{
|
||||
caption: '\\large',
|
||||
snippet: '\\large',
|
||||
meta: 'cmd',
|
||||
score: 0.20377416734108866,
|
||||
},
|
||||
{
|
||||
caption: '\\sqrt{}',
|
||||
snippet: '\\sqrt{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.20240160977404634,
|
||||
},
|
||||
{
|
||||
caption: '\\epsilon',
|
||||
snippet: '\\epsilon',
|
||||
meta: 'cmd',
|
||||
score: 0.2005136761359043,
|
||||
},
|
||||
{
|
||||
caption: '\\Large',
|
||||
snippet: '\\Large',
|
||||
meta: 'cmd',
|
||||
score: 0.1987771081149759,
|
||||
},
|
||||
{
|
||||
caption: '\\rho',
|
||||
snippet: '\\rho',
|
||||
meta: 'cmd',
|
||||
score: 0.1959287380541684,
|
||||
},
|
||||
{
|
||||
caption: '\\omega',
|
||||
snippet: '\\omega',
|
||||
meta: 'cmd',
|
||||
score: 0.19326783415115262,
|
||||
},
|
||||
{
|
||||
caption: '\\mathrm{}',
|
||||
snippet: '\\mathrm{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.19117752976172653,
|
||||
},
|
||||
{
|
||||
caption: '\\boldsymbol{}',
|
||||
snippet: '\\boldsymbol{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.18137737738638837,
|
||||
},
|
||||
{
|
||||
caption: '\\gamma',
|
||||
snippet: '\\gamma',
|
||||
meta: 'cmd',
|
||||
score: 0.17940276535431304,
|
||||
},
|
||||
{
|
||||
caption: '\\clearpage',
|
||||
snippet: '\\clearpage',
|
||||
meta: 'cmd',
|
||||
score: 0.1789117552185788,
|
||||
},
|
||||
{
|
||||
caption: '\\infty',
|
||||
snippet: '\\infty',
|
||||
meta: 'cmd',
|
||||
score: 0.17837290019711305,
|
||||
},
|
||||
{
|
||||
caption: '\\phi',
|
||||
snippet: '\\phi',
|
||||
meta: 'cmd',
|
||||
score: 0.17405809173097808,
|
||||
},
|
||||
{
|
||||
caption: '\\partial',
|
||||
snippet: '\\partial',
|
||||
meta: 'cmd',
|
||||
score: 0.17168102367966637,
|
||||
},
|
||||
{
|
||||
caption: '\\include{}',
|
||||
snippet: '\\include{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.1547080054979312,
|
||||
},
|
||||
{
|
||||
caption: '\\address{}',
|
||||
snippet: '\\address{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.1525055392611109,
|
||||
},
|
||||
{
|
||||
caption: '\\quad',
|
||||
snippet: '\\quad',
|
||||
meta: 'cmd',
|
||||
score: 0.15242755832392743,
|
||||
},
|
||||
{
|
||||
caption: '\\paragraph{}',
|
||||
snippet: '\\paragraph{$1}',
|
||||
meta: 'cmd',
|
||||
score: 0.152074250347974,
|
||||
},
|
||||
{
|
||||
caption: '\\varepsilon',
|
||||
snippet: '\\varepsilon',
|
||||
meta: 'cmd',
|
||||
score: 0.05411564201390573,
|
||||
},
|
||||
{
|
||||
caption: '\\zeta',
|
||||
snippet: '\\zeta',
|
||||
meta: 'cmd',
|
||||
score: 0.023330249803752954,
|
||||
},
|
||||
{
|
||||
caption: '\\eta',
|
||||
snippet: '\\eta',
|
||||
meta: 'cmd',
|
||||
score: 0.11088718379889091,
|
||||
},
|
||||
{
|
||||
caption: '\\vartheta',
|
||||
snippet: '\\vartheta',
|
||||
meta: 'cmd',
|
||||
score: 0.0025822992078068712,
|
||||
},
|
||||
{
|
||||
caption: '\\iota',
|
||||
snippet: '\\iota',
|
||||
meta: 'cmd',
|
||||
score: 0.0024774003791525486,
|
||||
},
|
||||
{
|
||||
caption: '\\kappa',
|
||||
snippet: '\\kappa',
|
||||
meta: 'cmd',
|
||||
score: 0.04887876299369008,
|
||||
},
|
||||
{
|
||||
caption: '\\nu',
|
||||
snippet: '\\nu',
|
||||
meta: 'cmd',
|
||||
score: 0.09206962821059342,
|
||||
},
|
||||
{
|
||||
caption: '\\xi',
|
||||
snippet: '\\xi',
|
||||
meta: 'cmd',
|
||||
score: 0.06496042899265699,
|
||||
},
|
||||
{
|
||||
caption: '\\varpi',
|
||||
snippet: '\\varpi',
|
||||
meta: 'cmd',
|
||||
score: 0.0007039358167790341,
|
||||
},
|
||||
{
|
||||
caption: '\\varrho',
|
||||
snippet: '\\varrho',
|
||||
meta: 'cmd',
|
||||
score: 0.0011279491613898612,
|
||||
},
|
||||
{
|
||||
caption: '\\varsigma',
|
||||
snippet: '\\varsigma',
|
||||
meta: 'cmd',
|
||||
score: 0.0010424880711234978,
|
||||
},
|
||||
{
|
||||
caption: '\\upsilon',
|
||||
snippet: '\\upsilon',
|
||||
meta: 'cmd',
|
||||
score: 0.00420715572598688,
|
||||
},
|
||||
{
|
||||
caption: '\\varphi',
|
||||
snippet: '\\varphi',
|
||||
meta: 'cmd',
|
||||
score: 0.03351251516668212,
|
||||
},
|
||||
{
|
||||
caption: '\\chi',
|
||||
snippet: '\\chi',
|
||||
meta: 'cmd',
|
||||
score: 0.043373492287805675,
|
||||
},
|
||||
{
|
||||
caption: '\\psi',
|
||||
snippet: '\\psi',
|
||||
meta: 'cmd',
|
||||
score: 0.09994508706163642,
|
||||
},
|
||||
{
|
||||
caption: '\\Gamma',
|
||||
snippet: '\\Gamma',
|
||||
meta: 'cmd',
|
||||
score: 0.04801549269801977,
|
||||
},
|
||||
{
|
||||
caption: '\\Theta',
|
||||
snippet: '\\Theta',
|
||||
meta: 'cmd',
|
||||
score: 0.038090902146599444,
|
||||
},
|
||||
{
|
||||
caption: '\\Lambda',
|
||||
snippet: '\\Lambda',
|
||||
meta: 'cmd',
|
||||
score: 0.032206594305977686,
|
||||
},
|
||||
{
|
||||
caption: '\\Xi',
|
||||
snippet: '\\Xi',
|
||||
meta: 'cmd',
|
||||
score: 0.01060997225400494,
|
||||
},
|
||||
{
|
||||
caption: '\\Pi',
|
||||
snippet: '\\Pi',
|
||||
meta: 'cmd',
|
||||
score: 0.021264671817473237,
|
||||
},
|
||||
{
|
||||
caption: '\\Sigma',
|
||||
snippet: '\\Sigma',
|
||||
meta: 'cmd',
|
||||
score: 0.05769642802079917,
|
||||
},
|
||||
{
|
||||
caption: '\\Upsilon',
|
||||
snippet: '\\Upsilon',
|
||||
meta: 'cmd',
|
||||
score: 0.00032875192955749566,
|
||||
},
|
||||
{
|
||||
caption: '\\Phi',
|
||||
snippet: '\\Phi',
|
||||
meta: 'cmd',
|
||||
score: 0.0538724950042562,
|
||||
},
|
||||
{
|
||||
caption: '\\Psi',
|
||||
snippet: '\\Psi',
|
||||
meta: 'cmd',
|
||||
score: 0.03056589143021648,
|
||||
},
|
||||
{
|
||||
caption: '\\Omega',
|
||||
snippet: '\\Omega',
|
||||
meta: 'cmd',
|
||||
score: 0.09490387997853639,
|
||||
},
|
||||
]
|
||||
@@ -0,0 +1,96 @@
|
||||
import { applySnippet, extendOverUnpairedClosingBrace } from './apply'
|
||||
import { Completion, CompletionContext } from '@codemirror/autocomplete'
|
||||
import { documentCommands } from '../document-commands'
|
||||
import { Command } from '../../../utils/tree-operations/commands'
|
||||
import { syntaxTree } from '@codemirror/language'
|
||||
|
||||
const commandNameFromLabel = (label: string): string | undefined =>
|
||||
label.match(/^\\\w+/)?.[0]
|
||||
|
||||
/**
 * Completions for commands used in the current document that aren't already
 * present in the supplied built-in completion list.
 *
 * Returns `commandCompletions` concatenated with the newly built custom
 * completions; callers therefore receive the built-ins as well.
 */
export function customCommandCompletions(
  context: CompletionContext,
  commandCompletions: Completion[]
) {
  // command names (e.g. "\foo") already covered by the built-in completions
  const existingCommands = new Set(
    commandCompletions
      .map(item => commandNameFromLabel(item.label))
      .filter(Boolean)
  )

  const output: Completion[] = []

  const items = countCommandUsage(context)

  for (const item of items.values()) {
    if (
      !existingCommands.has(commandNameFromLabel(item.label)) &&
      !item.ignoreInAutoComplete
    ) {
      output.push({
        type: 'cmd',
        label: item.label,
        // rank by usage: only commands used more than 10 times get a
        // positive boost
        boost: Math.max(0, item.count - 10),
        apply: applySnippet(item.snippet),
        extend: extendOverUnpairedClosingBrace,
      })
    }
  }

  return commandCompletions.concat(output)
}
|
||||
|
||||
/**
 * Count how often each command (keyed by its display label) appears in the
 * document, skipping the definition at the current cursor position.
 */
const countCommandUsage = (context: CompletionContext) => {
  const tree = syntaxTree(context.state)
  // the node just before the cursor, used to skip the command being edited
  const currentNode = tree.resolveInner(context.pos, -1)

  const result = new Map<
    string,
    {
      label: string
      snippet: string
      count: number
      ignoreInAutoComplete?: boolean
    }
  >()

  // commands collected from the document by the documentCommands state field
  const commandListProjection = context.state.field(documentCommands)

  if (!commandListProjection.items) {
    return result
  }

  for (const command of commandListProjection.items) {
    // don't count the definition currently being edited
    if (command.from === currentNode.from) {
      continue
    }
    const label = buildLabel(command)
    const snippet = buildSnippet(command)

    // note: the snippet and ignore flag come from the first occurrence seen
    const item = result.get(label) || {
      label,
      snippet,
      count: 0,
      ignoreInAutoComplete: command.ignoreInAutocomplete,
    }
    item.count++
    result.set(label, item)
  }

  return result
}
|
||||
|
||||
const buildLabel = (command: Command): string => {
|
||||
return [
|
||||
`${command.title}`,
|
||||
'[]'.repeat(command.optionalArgCount ?? 0),
|
||||
'{}'.repeat(command.requiredArgCount ?? 0),
|
||||
].join('')
|
||||
}
|
||||
|
||||
const buildSnippet = (command: Command): string => {
|
||||
return [
|
||||
`${command.title}`,
|
||||
'[#{}]'.repeat(command.optionalArgCount ?? 0),
|
||||
'{#{}}'.repeat(command.requiredArgCount ?? 0),
|
||||
].join('')
|
||||
}
|
||||
@@ -0,0 +1,42 @@
|
||||
import { customBeginCompletion } from './environments'
|
||||
import { CompletionContext } from '@codemirror/autocomplete'
|
||||
import { documentEnvironments } from '../document-environments'
|
||||
import { ProjectionResult } from '../../../utils/tree-operations/projection'
|
||||
import { Environment } from '../../../utils/tree-operations/environments'
|
||||
|
||||
/**
|
||||
* Environments from the current doc
|
||||
*/
|
||||
export function customEnvironmentCompletions(context: CompletionContext) {
|
||||
const items = findEnvironmentsInDoc(context)
|
||||
|
||||
const completions = []
|
||||
|
||||
for (const env of items.values()) {
|
||||
const completion = customBeginCompletion(env)
|
||||
if (completion) {
|
||||
completions.push(completion)
|
||||
}
|
||||
}
|
||||
|
||||
return completions
|
||||
}
|
||||
|
||||
export const findEnvironmentsInDoc = (context: CompletionContext) => {
|
||||
const result = new Set<string>()
|
||||
|
||||
const environmentNamesProjection: ProjectionResult<Environment> =
|
||||
context.state.field(documentEnvironments)
|
||||
if (!environmentNamesProjection || !environmentNamesProjection.items) {
|
||||
return result
|
||||
}
|
||||
|
||||
for (const environment of environmentNamesProjection.items) {
|
||||
// include the environment name if it's outside the current context
|
||||
if (environment.to < context.pos || environment.from > context.pos) {
|
||||
result.add(environment.title)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
@@ -0,0 +1,68 @@
|
||||
import { environments, snippet } from './data/environments'
|
||||
import { applySnippet, extendOverUnpairedClosingBrace } from './apply'
|
||||
import { Completion, CompletionContext } from '@codemirror/autocomplete'
|
||||
import { Completions } from './types'
|
||||
|
||||
/**
|
||||
* Environments from bundled data
|
||||
*/
|
||||
export function buildEnvironmentCompletions(completions: Completions) {
|
||||
for (const [item, snippet] of environments) {
|
||||
// clear snippet for some environments after inserting
|
||||
const clear =
|
||||
item === 'abstract' || item === 'itemize' || item === 'enumerate'
|
||||
completions.commands.push({
|
||||
type: 'env',
|
||||
label: `\\begin{${item}} …`,
|
||||
apply: applySnippet(snippet, clear),
|
||||
extend: extendOverUnpairedClosingBrace,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A `begin` environment completion with a snippet, for the current context
|
||||
*/
|
||||
export function customBeginCompletion(name: string): Completion | null {
|
||||
if (environments.has(name)) {
|
||||
return null
|
||||
}
|
||||
|
||||
return {
|
||||
label: `\\begin{${name}} …`,
|
||||
apply: applySnippet(snippet(name)),
|
||||
extend: extendOverUnpairedClosingBrace,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* `end` completions for open environments in the current doc, up to the current context
|
||||
* @return {*[]}
|
||||
*/
|
||||
export function customEndCompletions(context: CompletionContext): Completion[] {
|
||||
const openEnvironments = new Set<string>()
|
||||
|
||||
for (const line of context.state.doc.iterRange(0, context.pos)) {
|
||||
for (const match of line.matchAll(/\\(?<cmd>begin|end){(?<env>[^}]+)}/g)) {
|
||||
const { cmd, env } = match.groups as { cmd: string; env: string }
|
||||
|
||||
if (cmd === 'begin') {
|
||||
openEnvironments.add(env)
|
||||
} else {
|
||||
openEnvironments.delete(env)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const completions: Completion[] = []
|
||||
|
||||
let boost = 10
|
||||
for (const env of openEnvironments) {
|
||||
completions.push({
|
||||
label: env,
|
||||
boost: boost++, // environments opened later rank higher
|
||||
})
|
||||
}
|
||||
|
||||
return completions
|
||||
}
|
||||
@@ -0,0 +1,104 @@
|
||||
import { CompletionContext } from '@codemirror/autocomplete'
|
||||
import {
|
||||
extendOverUnpairedClosingBrace,
|
||||
extendRequiredParameter,
|
||||
} from './apply'
|
||||
import { Folder } from '../../../../../../../types/folder'
|
||||
import { Completions } from './types'
|
||||
import { metadataState } from '../../../extensions/language'
|
||||
|
||||
// TODO: case-insensitive regex
|
||||
|
||||
function removeBibExtension(path: string) {
|
||||
return path.replace(/\.bib$/, '')
|
||||
}
|
||||
|
||||
function removeTexExtension(path: string) {
|
||||
return path.replace(/\.tex$/, '')
|
||||
}
|
||||
|
||||
/**
 * Completions based on files in the project.
 *
 * Walks the project file tree from `metadata.fileTreeData` and, for each
 * file, pushes completions keyed by extension: .tex/.txt files for
 * \include / \input, image files for \includegraphics, and .bib files
 * for \bibliography. Does nothing when the metadata state field is absent.
 */
export function buildIncludeCompletions(
  completions: Completions,
  context: CompletionContext
) {
  const metadata = context.state.field(metadataState, false)

  if (!metadata?.fileTreeData) {
    return
  }

  // files in the project folder: push completions for a single file,
  // based on its extension
  const processFile = (path: string) => {
    if (/\.(?:tex|txt)$/.test(path)) {
      // path parameter for \include{path} or \input{path}
      completions.includes.push({
        type: 'file',
        label: path,
        apply: removeTexExtension(path),
        extend: extendRequiredParameter,
      })

      // \include{path}
      completions.commands.push({
        type: 'cmd',
        label: `\\include{${path}}`,
        apply: `\\include{${removeTexExtension(path)}}`,
        extend: extendOverUnpairedClosingBrace,
      })

      // \input{path}
      completions.commands.push({
        type: 'cmd',
        label: `\\input{${path}}`,
        apply: `\\input{${removeTexExtension(path)}}`,
        extend: extendOverUnpairedClosingBrace,
      })
    }

    // TODO: a better list of graphics extensions?
    if (/\.(eps|jpe?g|gif|png|tiff?|pdf|svg)$/i.test(path)) {
      // path parameter for \includegraphics{path}
      completions.graphics.push({
        type: 'file',
        label: path,
        extend: extendRequiredParameter,
      })

      // full \includegraphics command (note: label kept with extension)
      completions.commands.push({
        type: 'cmd',
        label: `\\includegraphics{${path}}`,
        extend: extendOverUnpairedClosingBrace,
      })
    }

    if (/\.bib$/.test(path)) {
      const label = removeBibExtension(path)
      // path without extension for \bibliography{path}
      completions.bibliographies.push({
        type: 'bib',
        label,
        extend: extendRequiredParameter,
      })
    }
  }

  // iterate through the files in a folder, recursing into subfolders;
  // `path` is the accumulated folder prefix ending with '/'
  const processFolder = ({ folders, docs, fileRefs }: Folder, path = '') => {
    for (const doc of docs) {
      processFile(`${path}${doc.name}`)
    }

    for (const fileRef of fileRefs) {
      processFile(`${path}${fileRef.name}`)
    }

    for (const folder of folders) {
      processFolder(folder, `${path}${folder.name}/`)
    }
  }

  processFolder(metadata.fileTreeData)
}
|
||||
@@ -0,0 +1,26 @@
|
||||
import { buildIncludeCompletions } from './include'
|
||||
import { buildLabelCompletions } from './labels'
|
||||
import { buildPackageCompletions } from './packages'
|
||||
import { buildSnippetCompletions } from './snippets'
|
||||
import { buildEnvironmentCompletions } from './environments'
|
||||
import { buildReferenceCompletions } from './references'
|
||||
import { buildClassCompletions } from './classes'
|
||||
import { Completions } from './types'
|
||||
import { buildBibliographyStyleCompletions } from './bibliography-styles'
|
||||
import { CompletionContext } from '@codemirror/autocomplete'
|
||||
|
||||
export const buildAllCompletions = (
|
||||
completions: Completions,
|
||||
context: CompletionContext
|
||||
) => {
|
||||
buildSnippetCompletions(completions)
|
||||
buildEnvironmentCompletions(completions)
|
||||
buildClassCompletions(completions)
|
||||
buildBibliographyStyleCompletions(completions)
|
||||
buildIncludeCompletions(completions, context)
|
||||
buildReferenceCompletions(completions, context)
|
||||
buildLabelCompletions(completions, context)
|
||||
buildPackageCompletions(completions, context)
|
||||
|
||||
return completions
|
||||
}
|
||||
@@ -0,0 +1,26 @@
|
||||
import { extendRequiredParameter } from './apply'
|
||||
import { Completions } from './types'
|
||||
import { metadataState } from '../../../extensions/language'
|
||||
import { CompletionContext } from '@codemirror/autocomplete'
|
||||
|
||||
/**
|
||||
* Labels parsed from docs in the project, for cross-referencing
|
||||
*/
|
||||
export function buildLabelCompletions(
|
||||
completions: Completions,
|
||||
context: CompletionContext
|
||||
) {
|
||||
const metadata = context.state.field(metadataState, false)
|
||||
|
||||
if (!metadata) {
|
||||
return
|
||||
}
|
||||
|
||||
for (const label of metadata.labels) {
|
||||
completions.labels.push({
|
||||
type: 'label',
|
||||
label,
|
||||
extend: extendRequiredParameter,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,96 @@
|
||||
import {
|
||||
applySnippet,
|
||||
extendOverUnpairedClosingBrace,
|
||||
extendRequiredParameter,
|
||||
} from './apply'
|
||||
import { packageNames } from './data/package-names'
|
||||
import { Completions } from './types'
|
||||
import { CompletionContext } from '@codemirror/autocomplete'
|
||||
import { metadataState } from '../../../extensions/language'
|
||||
|
||||
/**
 * Completions based on package names from bundled data and packages in the
 * project: command completions contributed by project packages, package-name
 * completions (excluding those already loaded in this doc), and an empty
 * \usepackage{} snippet. Does nothing when the metadata field is absent.
 */
export function buildPackageCompletions(
  completions: Completions,
  context: CompletionContext
) {
  const metadata = context.state.field(metadataState, false)

  if (!metadata) {
    return
  }

  // commands from packages in the project
  for (const command of metadata.commands) {
    completions.commands.push({
      type: command.meta,
      label: command.caption,
      apply: applySnippet(command.snippet),
      extend: extendOverUnpairedClosingBrace,
    })
  }

  // start from the bundled package-name list
  const uniquePackageNames = new Set<string>(packageNames)

  // package names from packages in the project
  for (const packageName of metadata.packageNames) {
    uniquePackageNames.add(packageName)
  }

  // exclude package names that are already in this document
  const existingPackageNames = findExistingPackageNames(context)

  for (const item of uniquePackageNames) {
    if (!existingPackageNames.has(item)) {
      // package name parameter completion
      completions.packages.push({
        type: 'pkg',
        label: item,
        extend: extendRequiredParameter,
      })

      const label = `\\usepackage{${item}}`

      // full command with parameter completion
      completions.commands.push({
        type: 'pkg',
        label,
        extend: extendOverUnpairedClosingBrace,
      })
    }
  }

  // empty \\usepackage{…} snippet, boosted above the per-package commands
  completions.commands.push({
    type: 'pkg',
    label: '\\usepackage{}',
    boost: 10,
    apply: applySnippet('\\usepackage{#{}}'),
    extend: extendOverUnpairedClosingBrace,
  })
}
|
||||
|
||||
const findExistingPackageNames = (context: CompletionContext) => {
|
||||
const { doc } = context.state
|
||||
|
||||
const excludeLineNumber = doc.lineAt(context.pos).number
|
||||
|
||||
const items = new Set<string>()
|
||||
|
||||
let currentLineNumber = 1
|
||||
for (const line of doc.iterLines()) {
|
||||
if (currentLineNumber++ === excludeLineNumber) {
|
||||
continue
|
||||
}
|
||||
|
||||
// TODO: exclude comments
|
||||
|
||||
for (const match of line.matchAll(/\\usepackage(\[.+?])?{(?<name>\w+)}/g)) {
|
||||
const { name } = match.groups as { name: string }
|
||||
items.add(name)
|
||||
}
|
||||
}
|
||||
|
||||
return items
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
/**
|
||||
* `cite` completions based on reference keys in the project
|
||||
*/
|
||||
import { CompletionContext } from '@codemirror/autocomplete'
|
||||
import { Completions } from './types'
|
||||
import { metadataState } from '../../../extensions/language'
|
||||
import { extendRequiredParameter } from './apply'
|
||||
import { maybeGetSectionForOption } from './sections'
|
||||
|
||||
export function buildReferenceCompletions(
|
||||
completions: Completions,
|
||||
context: CompletionContext
|
||||
) {
|
||||
const metadata = context.state.field(metadataState, false)
|
||||
|
||||
if (!metadata) {
|
||||
return
|
||||
}
|
||||
|
||||
for (const referenceKey of metadata.referenceKeys) {
|
||||
completions.references.push({
|
||||
type: 'reference',
|
||||
label: referenceKey,
|
||||
extend: extendRequiredParameter,
|
||||
section: maybeGetSectionForOption(context, 'references'),
|
||||
deduplicate: {
|
||||
key: referenceKey,
|
||||
priority: 1,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
import { CompletionContext, CompletionSection } from '@codemirror/autocomplete'
|
||||
import importOverleafModules from '../../../../../../macros/import-overleaf-module.macro'
|
||||
|
||||
// Maps a completion context and completion type to a section (heading)
// for the autocomplete list, or undefined to decline.
type SectionGenerator = (
  context: CompletionContext,
  type: string
) => CompletionSection | string | undefined
// Section title generators contributed by Overleaf modules.
const sectionTitleGenerators: Array<SectionGenerator> = importOverleafModules(
  'sectionTitleGenerators'
).map(
  (item: { import: { getSection: SectionGenerator } }) => item.import.getSection
)
|
||||
|
||||
export function maybeGetSectionForOption(
|
||||
context: CompletionContext,
|
||||
type: string
|
||||
) {
|
||||
for (const generator of sectionTitleGenerators) {
|
||||
const section = generator(context, type)
|
||||
if (section !== undefined) {
|
||||
return section
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
@@ -0,0 +1,29 @@
|
||||
import topHundredSnippets from './data/top-hundred-snippets'
|
||||
import snippets from './data/snippets'
|
||||
import { applySnippet, extendOverUnpairedClosingBrace } from './apply'
|
||||
import { Completions } from './types'
|
||||
|
||||
/**
|
||||
* Completions based on bundled snippets
|
||||
*/
|
||||
export function buildSnippetCompletions(completions: Completions) {
|
||||
for (const item of topHundredSnippets) {
|
||||
completions.commands.push({
|
||||
type: item.meta,
|
||||
label: item.caption,
|
||||
boost: item.score,
|
||||
apply:
|
||||
item.snippet === item.caption ? undefined : applySnippet(item.snippet),
|
||||
extend: extendOverUnpairedClosingBrace,
|
||||
})
|
||||
}
|
||||
|
||||
for (const item of snippets) {
|
||||
completions.commands.push({
|
||||
type: item.type,
|
||||
label: item.label,
|
||||
apply: applySnippet(item.snippet),
|
||||
extend: extendOverUnpairedClosingBrace,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
import { Completion } from '@codemirror/autocomplete'

// Named buckets of completions (e.g. `commands`, `labels`, `packages`,
// `references`) built up by the build*Completions helpers in this folder.
export type Completions = Record<string, Completion[]>
|
||||
@@ -0,0 +1,4 @@
|
||||
import { Command, enterNode } from '../../utils/tree-operations/commands'
import { makeProjectionStateField } from '../../utils/projection-state-field'

// State field holding a projection of `Command` items from the document,
// built by the `enterNode` tree visitor.
export const documentCommands = makeProjectionStateField<Command>(enterNode)
|
||||
@@ -0,0 +1,8 @@
|
||||
import {
  Environment,
  enterNode,
} from '../../utils/tree-operations/environments'
import { makeProjectionStateField } from '../../utils/projection-state-field'

// State field holding a projection of `Environment` items from the
// document, built by the `enterNode` tree visitor.
export const documentEnvironments =
  makeProjectionStateField<Environment>(enterNode)
|
||||
@@ -0,0 +1,5 @@
|
||||
import { enterNode, FlatOutlineItem } from '../../utils/tree-operations/outline'
import { makeProjectionStateField } from '../../utils/projection-state-field'

// State field holding a projection of `FlatOutlineItem`s from the
// document, built by the `enterNode` tree visitor.
export const documentOutline =
  makeProjectionStateField<FlatOutlineItem>(enterNode)
|
||||
@@ -0,0 +1,52 @@
|
||||
import { latexIndentService } from './latex-indent-service'
|
||||
import { shortcuts } from './shortcuts'
|
||||
import { linting } from './linting'
|
||||
import { LanguageSupport } from '@codemirror/language'
|
||||
import { CompletionSource } from '@codemirror/autocomplete'
|
||||
import { openAutocomplete } from './open-autocomplete'
|
||||
import { metadata } from './metadata'
|
||||
import {
|
||||
argumentCompletionSources,
|
||||
explicitCommandCompletionSource,
|
||||
inCommandCompletionSource,
|
||||
beginEnvironmentCompletionSource,
|
||||
} from './complete'
|
||||
import { documentCommands } from './document-commands'
|
||||
import importOverleafModules from '../../../../../macros/import-overleaf-module.macro'
|
||||
import { documentOutline } from './document-outline'
|
||||
import { LaTeXLanguage } from './latex-language'
|
||||
import { documentEnvironments } from './document-environments'
|
||||
import {
|
||||
figureModal,
|
||||
figureModalPasteHandler,
|
||||
} from '../../extensions/figure-modal'
|
||||
|
||||
const completionSources: CompletionSource[] = [
|
||||
...argumentCompletionSources,
|
||||
inCommandCompletionSource,
|
||||
explicitCommandCompletionSource,
|
||||
beginEnvironmentCompletionSource,
|
||||
...importOverleafModules('sourceEditorCompletionSources').map(
|
||||
(item: any) => item.import.default
|
||||
),
|
||||
]
|
||||
|
||||
export const latex = () => {
|
||||
return new LanguageSupport(LaTeXLanguage, [
|
||||
shortcuts(),
|
||||
documentOutline,
|
||||
documentCommands,
|
||||
documentEnvironments,
|
||||
latexIndentService(),
|
||||
linting(),
|
||||
metadata(),
|
||||
openAutocomplete(),
|
||||
...completionSources.map(completionSource =>
|
||||
LaTeXLanguage.data.of({
|
||||
autocomplete: completionSource,
|
||||
})
|
||||
),
|
||||
figureModal(),
|
||||
figureModalPasteHandler(),
|
||||
])
|
||||
}
|
||||
@@ -0,0 +1,10 @@
|
||||
import { indentService } from '@codemirror/language'
|
||||
|
||||
export const latexIndentService = () =>
|
||||
indentService.of((indentContext, pos) => {
|
||||
// only use this for insertNewLineAndIndent
|
||||
if (indentContext.simulatedBreak) {
|
||||
// match the indentation of the previous line (if present)
|
||||
return null
|
||||
}
|
||||
})
|
||||
@@ -0,0 +1,233 @@
|
||||
import { LRLanguage, foldNodeProp, foldInside } from '@codemirror/language'
|
||||
import { parser } from '../../lezer-latex/latex.mjs'
|
||||
import { styleTags, tags as t } from '@lezer/highlight'
|
||||
import * as termsModule from '../../lezer-latex/latex.terms.mjs'
|
||||
import { NodeProp } from '@lezer/common'
|
||||
import {
|
||||
Tokens,
|
||||
commentIsOpenFold,
|
||||
findClosingFoldComment,
|
||||
getFoldRange,
|
||||
} from '../../utils/tree-query'
|
||||
import { closeBracketConfig } from './close-bracket-config'
|
||||
import { noSpellCheckProp } from '@/features/source-editor/utils/node-props'
|
||||
|
||||
// Tokens styled differently from their category default (applied via
// `styleEntry`): these control sequences are highlighted as keywords.
const styleOverrides: Record<string, any> = {
  DocumentClassCtrlSeq: t.keyword,
  UsePackageCtrlSeq: t.keyword,
  CiteCtrlSeq: t.keyword,
  CiteStarrableCtrlSeq: t.keyword,
  RefCtrlSeq: t.keyword,
  RefStarrableCtrlSeq: t.keyword,
  LabelCtrlSeq: t.keyword,
}
|
||||
|
||||
const styleEntry = (token: string, defaultStyle: any) => {
|
||||
return [token, styleOverrides[token] || defaultStyle]
|
||||
}
|
||||
|
||||
// Highlight styles per token category, with the per-token overrides from
// `styleOverrides` applied by `styleEntry`.
const Styles = {
  ctrlSeq: Object.fromEntries(
    Tokens.ctrlSeq.map(token => styleEntry(token, t.tagName))
  ),
  ctrlSym: Object.fromEntries(
    Tokens.ctrlSym.map(token => styleEntry(token, t.literal))
  ),
  envName: Object.fromEntries(
    Tokens.envName.map(token => styleEntry(token, t.attributeValue))
  ),
}
|
||||
|
||||
// Extra node groups assigned by node type name; consumed by the
// `NodeProp.group.add` callback in the language definition below.
const typeMap: Record<string, string[]> = {
  // commands that are section headings
  PartCtrlSeq: ['$SectioningCtrlSeq'],
  ChapterCtrlSeq: ['$SectioningCtrlSeq'],
  SectionCtrlSeq: ['$SectioningCtrlSeq'],
  SubSectionCtrlSeq: ['$SectioningCtrlSeq'],
  SubSubSectionCtrlSeq: ['$SectioningCtrlSeq'],
  ParagraphCtrlSeq: ['$SectioningCtrlSeq'],
  SubParagraphCtrlSeq: ['$SectioningCtrlSeq'],
  // commands that have a "command tooltip"
  HrefCommand: ['$CommandTooltipCommand'],
  Include: ['$CommandTooltipCommand'],
  Input: ['$CommandTooltipCommand'],
  Ref: ['$CommandTooltipCommand'],
  UrlCommand: ['$CommandTooltipCommand'],
  // text formatting commands that can be toggled via the toolbar
  TextBoldCommand: ['$ToggleTextFormattingCommand'],
  TextItalicCommand: ['$ToggleTextFormattingCommand'],
  // text formatting commands that cannot be toggled via the toolbar
  TextSmallCapsCommand: ['$OtherTextFormattingCommand'],
  TextTeletypeCommand: ['$OtherTextFormattingCommand'],
  TextMediumCommand: ['$OtherTextFormattingCommand'],
  TextSansSerifCommand: ['$OtherTextFormattingCommand'],
  TextSuperscriptCommand: ['$OtherTextFormattingCommand'],
  TextSubscriptCommand: ['$OtherTextFormattingCommand'],
  StrikeOutCommand: ['$OtherTextFormattingCommand'],
  EmphasisCommand: ['$OtherTextFormattingCommand'],
  UnderlineCommand: ['$OtherTextFormattingCommand'],
}
|
||||
|
||||
/**
 * The LaTeX language definition: the Lezer parser configured with folding,
 * spell-check suppression, node grouping and syntax highlighting props.
 */
export const LaTeXLanguage = LRLanguage.define({
  name: 'latex',
  parser: parser.configure({
    props: [
      // code folding ranges
      foldNodeProp.add({
        // fold comment-delimited regions (open/close fold comments)
        Comment: (node, state) => {
          if (commentIsOpenFold(node, state)) {
            const closingFoldNode = findClosingFoldComment(node, state)
            if (closingFoldNode) {
              return getFoldRange(node, closingFoldNode, state)
            }
          }
          return null
        },
        Group: foldInside,
        NonEmptyGroup: foldInside,
        TextArgument: foldInside,
        // TODO: Why isn't
        // `Content: node => node,`
        // enough? For some reason it doesn't work if there's a newline after
        // \section{a}, but works for \section{a}b
        $Environment: node => node.getChild('Content'),
        // fold a sectioning command's content, trimming trailing newlines
        $Section: node => {
          const BACKWARDS = -1
          const lastChild = node.resolveInner(node.to, BACKWARDS)
          const content = node.getChild('Content')
          if (!content) {
            return null
          }
          if (lastChild.type.is(termsModule.NewLine)) {
            // Ignore the last newline for sectioning commands
            return { from: content!.from, to: lastChild.from }
          }
          if (lastChild.type.is(termsModule.Whitespace)) {
            // If the sectioning command is indented on a new line
            let sibling = lastChild.prevSibling
            while (sibling?.type.is(termsModule.Whitespace)) {
              sibling = sibling.prevSibling
            }
            if (sibling?.type.is(termsModule.NewLine)) {
              return { from: content!.from, to: sibling.from }
            }
          }
          if (lastChild.type.is(termsModule.BlankLine)) {
            // HACK: BlankLine can contain any number (2 or more) of \n's.
            // Include every one except for the last one
            return { from: content!.from, to: lastChild.to - 1 }
          }
          return content
        },
      }),
      // disable spell check in these node types when they're inside these parents (empty string = any parent)
      noSpellCheckProp.add({
        BibKeyArgument: [['']],
        BibliographyArgument: [['']],
        BibliographyStyleArgument: [['']],
        DocumentClassArgument: [['']],
        LabelArgument: [['']],
        PackageArgument: [['']],
        RefArgument: [['']],
        OptionalArgument: [
          ['DocumentClass'],
          ['IncludeGraphics'],
          ['LineBreak'],
          ['UsePackage'],
          ['FigureEnvironment', 'BeginEnv'],
          ['ListEnvironment', 'BeginEnv'],
        ],
        ShortTextArgument: [['Date'], ['SetLengthCommand']],
        TextArgument: [['TabularEnvironment', 'BeginEnv']],
      }),
      // assign node groups by token list, name suffix, and the typeMap above
      // TODO: does this override groups defined in the grammar?
      NodeProp.group.add(type => {
        const types = []

        if (
          Tokens.ctrlSeq.includes(type.name) ||
          Tokens.ctrlSym.includes(type.name)
        ) {
          types.push('$CtrlSeq')
          if (Tokens.ctrlSym.includes(type.name)) {
            types.push('$CtrlSym')
          }
        } else if (Tokens.envName.includes(type.name)) {
          types.push('$EnvName')
        } else if (type.name.endsWith('Command')) {
          types.push('$Command')
        } else if (type.name.endsWith('Argument')) {
          types.push('$Argument')
          if (
            type.name.endsWith('TextArgument') ||
            type.is('SectioningArgument')
          ) {
            types.push('$TextArgument')
          }
        } else if (type.name.endsWith('Brace')) {
          types.push('$Brace')
        }

        if (type.name in typeMap) {
          types.push(...typeMap[type.name])
        }

        return types.length > 0 ? types : undefined
      }),
      // syntax highlighting tags, keyed by node path
      styleTags({
        ...Styles.ctrlSeq,
        ...Styles.ctrlSym,
        ...Styles.envName,
        'HrefCommand/ShortTextArgument/ShortArg/...': t.link,
        'HrefCommand/UrlArgument/...': t.monospace,
        'CtrlSeq Csname': t.tagName,
        'DocumentClass/OptionalArgument/ShortOptionalArg/...': t.attributeValue,
        'DocumentClass/ShortTextArgument/ShortArg/Normal': t.typeName,
        'ListEnvironment/BeginEnv/OptionalArgument/...': t.monospace,
        Number: t.number,
        OpenBrace: t.brace,
        CloseBrace: t.brace,
        OpenBracket: t.squareBracket,
        CloseBracket: t.squareBracket,
        Dollar: t.string,
        Math: t.string,
        'Math/MathChar': t.string,
        'Math/MathSpecialChar': t.string,
        'Math/Number': t.string,
        'MathGroup/OpenBrace MathGroup/CloseBrace': t.string,
        'MathTextCommand/TextArgument/OpenBrace MathTextCommand/TextArgument/CloseBrace':
          t.string,
        'MathOpening/LeftCtrlSeq MathClosing/RightCtrlSeq MathUnknownCommand/CtrlSeq MathTextCommand/CtrlSeq':
          t.literal,
        MathDelimiter: t.literal,
        DoubleDollar: t.keyword,
        Tilde: t.keyword,
        Ampersand: t.keyword,
        LineBreakCtrlSym: t.keyword,
        Comment: t.comment,
        'UsePackage/OptionalArgument/ShortOptionalArg/Normal': t.attributeValue,
        'UsePackage/ShortTextArgument/ShortArg/Normal': t.tagName,
        'Affiliation/OptionalArgument/ShortOptionalArg/Normal':
          t.attributeValue,
        'Affil/OptionalArgument/ShortOptionalArg/Normal': t.attributeValue,
        'LiteralArgContent VerbContent VerbatimContent LstInlineContent':
          t.string,
        'NewCommand/LiteralArgContent': t.typeName,
        'LabelArgument/ShortTextArgument/ShortArg/...': t.attributeValue,
        'RefArgument/ShortTextArgument/ShortArg/...': t.attributeValue,
        'BibKeyArgument/ShortTextArgument/ShortArg/...': t.attributeValue,
        'ShortTextArgument/ShortArg/Normal': t.monospace,
        'UrlArgument/LiteralArgContent': [t.attributeValue, t.url],
        'FilePathArgument/LiteralArgContent': t.attributeValue,
        'BareFilePathArgument/SpaceDelimitedLiteralArgContent':
          t.attributeValue,
        TrailingContent: t.comment,
        'Item/OptionalArgument/ShortOptionalArg/...': t.strong,
        // TODO: t.strong, t.emphasis
      }),
    ],
  }),
  languageData: {
    commentTokens: { line: '%' },
    closeBrackets: closeBracketConfig,
  },
})
|
||||
@@ -0,0 +1,122 @@
|
||||
# How the Ace latex linter works
|
||||
|
||||
The purpose of the linter is to check the following
|
||||
|
||||
- each open environment is closed in the correct order
|
||||
- `\begin` .. `\end`
|
||||
- `$` ... `$` # inline math
|
||||
- `$$` ... `$$` # display math
|
||||
- `{` ... `}` # grouping commands
|
||||
- `\left` .. `\right` # bracket commands (in math-mode only)
|
||||
- math-mode commands are only used in math-mode (e.g. `\alpha`, `^` and `_`)
|
||||
- text-specific commands are only used outside math-mode (e.g. `\chapter` should not be used in math-mode)
|
||||
|
||||
The general approach of the ace linter is as follows
|
||||
|
||||
- skip over all of the file apart from the relevant tokens
|
||||
- iterate through the tokens keeping track of the current context (environment/math-mode)
|
||||
- report an error if
|
||||
- a token is not allowed in the current context
|
||||
- an environment is closed incorrectly
|
||||
|
||||
# Implementation
|
||||
|
||||
The implementation has two main phases
|
||||
|
||||
1. Tokenise
|
||||
2. InterpretTokens
|
||||
|
||||
The linter returns errors found in the InterpretTokens phase.
|
||||
|
||||
## Tokenise - finding tokens
|
||||
|
||||
The tokenizer starts from the beginning of the file, searching repeatedly for the next special character: `['\', '{', '}', '$', '&', '#', '^', '_', '~', '%']`.
|
||||
|
||||
When a special character is found, it is inspected and additional characters consumed according to the following TeX rules:
|
||||
|
||||
- `'\'` escape character: Handle TeX control sequences (`\foo`) and control symbols (`\@`).
|
||||
- followed by `[a-zA-Z]+`: it's a control sequence, consume whitespace after it
|
||||
- otherwise, it's a control symbol (single character)
|
||||
- any other special character `['{', '}', '$', '&', '#', '^', '_', '~']`: push it as a token
|
||||
- `'%'` comment: consume up to next newline.
|
||||
Special cases:
|
||||
- `%novalidate` disable validation in this file
|
||||
- `%begin novalidate`, `%end novalidate` disable validation in a region
|
||||
- anything else: throw an error for an unexpected character
|
||||
|
||||
The content between special characters, and not consumed by any rules above or below, is marked as a `Text` token.
|
||||
|
||||
After this stage, we have a list of the tokens (with their positions) and text regions.
|
||||
|
||||
## InterpretTokens - from tokens to environments
|
||||
|
||||
This function iterates over the tokens, looking for groups or environments to match. Each environment command is pushed onto the `Environments` array. As part of this process other commands are interpreted in order to skip over them (for example `\newcommand{\foo}{\begin{equation}}` is valid and should not trigger an "unmatched environment" error.). We consume these commands using custom argument functions.
|
||||
|
||||
When we push an entry onto the `Environments` array we also keep track of the math mode (either `null`, `inline`, or `display`), and for some commands the mode of the next argument (`nextGroupMathMode`). For example, `\hbox` has `nextGroupMathMode` false since the next group is its argument `{...}` which will always be text.
|
||||
|
||||
### Custom argument functions
|
||||
|
||||
Tokens may be followed by various arguments, which can be optional. These are consumed by custom functions which look for the correct format:
|
||||
|
||||
- `read1arg`: read an argument `FOO` to a either form of command `\newcommand\FOO...` or `\newcommand{\FOO}...`. Also support optional `*` form `\newcommand*`.
|
||||
- `readLetDefinition`: read a let command (the equals sign is optional) `\let\foo=\bar`, `\let\foo=TOKEN`, `\let\foo\bar`, `\let\foo\TOKEN`.
|
||||
- `read1name`: read an environment name `FOO` in `\newenvironment{FOO}...`, also handle names like `FOO_BAR`.
|
||||
- `read1filename`: read a filename like `foo_bar.tex` (may include `_`)
|
||||
- `readOptionalParams`: read an optional parameter `[N]` where `N` is a number, used for `\newcommand{\foo}[2]...` meaning 2 parameters. Allow for additional arguments like `[1][key=value,key=value]` and skip over arbitrary arguments `[xxx][yyy][\foo{zzz}]{...` up to the first `{..`
|
||||
- `readOptionalGeneric`: read a single optional parameter `[foo]`
|
||||
- `readOptionalDef`: skip over the optional arguments of a definition `\def\foo#1.#2(#3){this is the macro #1 #2 #3}` to start looking at text immediately after `\def` command.
|
||||
- `readDefinition`: read a definition as in `\newcommand{\FOO}{DEFN}` or `\newcommand{\FOO} {DEF}` (optional whitespace). Look ahead for argument, consuming whitespace, the definition is read looking for balanced `{` ... `}` braces.
|
||||
- `readVerb`: read a verbatim argument `\verb@foo@` or `\verb*@foo@` where `@` is any character except `*` for `\verb`, `foo` is any sequence excluding end-of-line and the delimiter. A space does work for `@`, contrary to latex documentation. Note: this is only an approximation, because we have already tokenised the input stream, and we should really do that taking into account the effect of verb. For example `\verb|%|` will get confused because `%` is treated as a comment character during tokenisation.
|
||||
- `readUrl`: read a url argument `\url|foo|`, `\url{foo}` Note: this is only an approximation, because we have already tokenised the input stream, so anything after a comment character % on the current line will not be present in the input stream.
|
||||
|
||||
### Token interpretation
|
||||
|
||||
The following tokens trigger special handling, either by starting or closing a group or environment, being a command that must be interpreted (to skip over arguments) or having special properties (such as only being permissible in certain environments).
|
||||
|
||||
- `{` and `}` handle open and close group as a type of environment
|
||||
- `\begin` and `\end` followed by a text token, taken as the environment name
|
||||
- also allow repeated text tokens separated by `_` (e.g like `\begin{new_major_theorem}`)
|
||||
- Parse bracket commands, treated as an environment since they must match
|
||||
- `\left` and `\right` must be followed by one of `(){}[]<>/|\.`
|
||||
- `\(` ... `\)` and `\[` ... `\]` handle open and close math-modes as a type of environment
|
||||
- Parse command definitions in a limited way, to avoid falsely reporting errors from unmatched environments in the command definition e.g. `\newcommand{\foo}{\begin{equation}}` is valid and should not trigger an "unmatched environment" error.
|
||||
- `newcommand`, `renewcommand`, `DeclareRobustCommand`: read1arg readOptionalParams readDefinition
|
||||
- `def`: read1arg readOptionalDef readDefinition
|
||||
- `let`: readLetDefinition
|
||||
- `newcolumntype` read1name readOptionalParams readDefinition
|
||||
- `newenvironment`, `renewenvironment` read1name readOptionalParams readDefinition(open) readDefinition(close)
|
||||
- Parse special commands
|
||||
- `verb`: readVerb (`\verb|....|` where `|` is any char)
|
||||
- `url`: readUrl (`\url{...}` or `\url|....|` where `|` is any char)
|
||||
- `input`: read1filename
|
||||
- Parse text mode commands - the next group will be in text mode regardless
|
||||
- `hbox`, `text`, `mbox`, `footnote`, `intertext`, `shortintertext`, `textnormal`, `tag`, `reflectbox`, `textrm`
|
||||
- Parse graphics commands
|
||||
- `tikz`: readOptionalGeneric
|
||||
- `rotatebox`, `scalebox`, `feynmandiagram`: readOptionalGeneric readDefinition
|
||||
- `resizebox`: readOptionalGeneric readDefinition(width) readDefinition(height)
|
||||
- Parse math definition commands
|
||||
- `DeclareMathOperator`: readDefinition(first arg) readDefinition(second arg)
|
||||
- `DeclarePairedDelimiter`: readDefinition(first arg) readDefinition(second arg) readDefinition(third arg)
|
||||
- Math-mode commands
|
||||
- `(alpha|beta|gamma|delta|epsilon|varepsilon|zeta|eta|theta|vartheta|iota|kappa|lambda|mu|nu|xi|pi|varpi|rho|varrho|sigma|varsigma|tau|upsilon|phi|varphi|chi|psi|omega|Gamma|Delta|Theta|Lambda|Xi|Pi|Sigma|Upsilon|Phi|Psi|Omega)`: can only be used in math mode
|
||||
- Text-mode commands
|
||||
- `(chapter|section|subsection|subsubsection)`: cannot be used in math mode
|
||||
- Any other unrecognised command
|
||||
- `\[a-z]+`: if we see an unknown command \foo{...}{...} put the math mode for the next group into the 'undefined' state, because we do not know what math mode an arbitrary macro will use for its arguments. In the math mode 'undefined' state we don't report errors when we encounter math or text commands.
|
||||
- Math-mode delimiters
|
||||
- `$$` - display math environment
|
||||
- `$` - inline math environment
|
||||
- Subscript and superscript (must be inside math-mode)
|
||||
- `^` and `_`: check for mathmode assuming environments are correct
|
||||
|
||||
### Environment handling
|
||||
|
||||
- must be outside math mode: `(document|figure|center|enumerate|itemize|table|abstract|proof|lemma|theorem|definition|proposition|corollary|remark|notation|thebibliography)`
|
||||
- must be inside math mode: `(array|gathered|split|aligned|alignedat)\*?`
|
||||
- must be outside math mode but starts it: `(math|displaymath|equation|eqnarray|multline|align|gather|flalign|alignat)\*?`
|
||||
- start a verbatim environment: `(verbatim|boxedverbatim|lstlisting|minted|Verbatim)`. Any errors occurring in a verbatim environment are ignored.
|
||||
|
||||
### Special cases
|
||||
|
||||
If we encounter any tokens matching `(be|beq|beqa|bea)` or `(ee|eeq|eeqn|eeqa|eeqan|eea)` we filter out all errors relating to math-mode violations, since these are common user-defined macros to replace `\begin{equation}` etc.
|
||||
@@ -0,0 +1,97 @@
|
||||
import { Diagnostic } from '@codemirror/lint'
|
||||
import { Range } from '../../../utils/range'
|
||||
import { renderMessage } from '@/features/source-editor/extensions/annotations'
|
||||
|
||||
// A single error produced by the LaTeX linter worker, before conversion
// to a CodeMirror Diagnostic.
export type LintError = {
  // start of the error range (document offset)
  startPos: number
  // end of the error range (document offset)
  endPos: number
  // offset of the *cause* of the error; equal to startPos when the cause
  // is at the beginning of the range (used by errorsToDiagnostics to
  // decide which end of the range may be moved to the cursor)
  pos: number
  // when true, hide this error while the user is editing at its edges
  suppressIfEditing: boolean
  // severity, mapped directly onto the Diagnostic severity
  type: 'info' | 'warning' | 'error'
  // human-readable message, mapped onto the Diagnostic message
  text: string
}
|
||||
|
||||
// Convert errors generated by the linter to Diagnostic objects that CM6
|
||||
// expects. Filter and adjust positions based on cursor position. This is
|
||||
// adapted from roughly equivalent Ace code:
|
||||
// https://github.com/overleaf/ace/blob/31998cd6178c4115ad67f8e101f41590dcb32522/lib/ace/mode/latex.js#L58-L183
|
||||
export const errorsToDiagnostics = (
|
||||
errors: LintError[],
|
||||
cursorPosition: number,
|
||||
docLength: number
|
||||
): Diagnostic[] => {
|
||||
const diagnostics: Diagnostic[] = []
|
||||
const cursorAtEndOfDocument = cursorPosition === docLength
|
||||
const suppressions = []
|
||||
|
||||
for (const error of errors) {
|
||||
const errorRange = new Range(error.startPos, error.endPos)
|
||||
|
||||
// Reject an error that starts or ends after the document length. This
|
||||
// can happen if the document changed while the linter was running and
|
||||
// would cause an exception if a diagnostic was created for it
|
||||
if (errorRange.from > docLength || errorRange.to > docLength) {
|
||||
continue
|
||||
}
|
||||
|
||||
const cursorInRange = errorRange.contains(cursorPosition)
|
||||
const cursorAtStart = cursorPosition === errorRange.from + 1 // cursor after start not before
|
||||
const cursorAtEnd = cursorPosition === errorRange.to
|
||||
|
||||
// If the user is editing at the beginning or end of this error, suppress
|
||||
// it from display
|
||||
if (error.suppressIfEditing && (cursorAtStart || cursorAtEnd)) {
|
||||
suppressions.push(errorRange)
|
||||
continue
|
||||
}
|
||||
|
||||
// Otherwise, check if this error starts inside a
|
||||
// suppressed error range (it's probably a cascading
|
||||
// error, so we hide it while the user is typing)
|
||||
let isCascadeError = false
|
||||
for (const badRange of suppressions) {
|
||||
if (badRange.intersects(errorRange)) {
|
||||
isCascadeError = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Hide cascade errors
|
||||
if (isCascadeError) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Adjust the error range if the cursor is inside the range.
|
||||
//
|
||||
// If the cause of the error is at the beginning, move the end of the range
|
||||
// to the cursor position.
|
||||
//
|
||||
// If the cause of the error is at the end, and the cursor is inside the
|
||||
// range, move the beginning of the range to the cursor position.
|
||||
//
|
||||
// If the cursor is at the end of the document, make no adjustment because
|
||||
// doing the regular adjustments doesn't always give intuitive results at
|
||||
// the end of the document.
|
||||
const errorAtStart = error.pos === error.startPos
|
||||
const movableStart =
|
||||
cursorInRange && !errorAtStart && !cursorAtEndOfDocument
|
||||
const movableEnd = cursorInRange && errorAtStart && !cursorAtEndOfDocument
|
||||
const newStart = movableStart ? cursorPosition : errorRange.from
|
||||
const newEnd = movableEnd ? cursorPosition : errorRange.to
|
||||
|
||||
const diagnostic: Diagnostic = {
|
||||
from: newStart,
|
||||
to: newEnd,
|
||||
severity: error.type,
|
||||
message: error.text,
|
||||
}
|
||||
|
||||
// Create the diagnostic
|
||||
diagnostics.push({
|
||||
...diagnostic,
|
||||
renderMessage: () => renderMessage(diagnostic),
|
||||
})
|
||||
}
|
||||
|
||||
return diagnostics
|
||||
}
|
||||
@@ -0,0 +1,86 @@
|
||||
import { EditorView } from '@codemirror/view'
|
||||
import { Diagnostic } from '@codemirror/lint'
|
||||
import { errorsToDiagnostics, LintError } from './errors-to-diagnostics'
|
||||
import { mergeCompatibleOverlappingDiagnostics } from './merge-overlapping-diagnostics'
|
||||
|
||||
// Web worker that runs the (potentially slow) LaTeX linter off the UI
// thread; results come back via the 'message' listener below.
const lintWorker = new Worker(
  /* webpackChunkName: "latex-linter-worker" */
  new URL('./latex-linter.worker.js', import.meta.url),
  { type: 'module' }
)
|
||||
|
||||
class Deferred {
|
||||
public promise: Promise<readonly Diagnostic[]>
|
||||
public resolve?: (value: PromiseLike<Diagnostic[]> | Diagnostic[]) => void
|
||||
public reject?: (reason?: any) => void
|
||||
|
||||
constructor() {
|
||||
this.promise = new Promise((resolve, reject) => {
|
||||
this.resolve = resolve
|
||||
this.reject = reject
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// promise which will resolve to the results of the current linting run;
// null when no lint is in flight
let linterPromise: Promise<any> | null = null
// single deferred promise shared by all linting requests that arrive while
// the current run is in flight; null when nothing is queued
let queuedRequest: Deferred | null = null
// the view most recently passed to latexLinter; worker results are
// converted against this view's state
let currentView: EditorView | null = null

// resolver for the promise returned by the in-flight runLinter() call
let currentResolver: ((value: Diagnostic[]) => void) | null = null
|
||||
|
||||
// Post the current document text to the worker and return a promise that
// the worker's 'message' handler will resolve with the diagnostics.
const runLinter = () => {
  // currentView is always assigned by latexLinter before this runs
  lintWorker.postMessage({ text: currentView!.state.doc.toString() })
  return new Promise<Diagnostic[]>(resolve => {
    currentResolver = resolve
  })
}
|
||||
|
||||
// Handle results posted back by the linter worker: convert the raw errors
// to CM6 diagnostics against the current view's state, merge compatible
// overlapping ones, and resolve the in-flight runLinter() promise.
lintWorker!.addEventListener('message', event => {
  if (event.data) {
    const errors = event.data.errors as LintError[]
    // currentView/currentResolver are set before a message can arrive
    const editorState = currentView!.state
    const doc = editorState.doc
    const cursorPosition = editorState.selection.main.head
    const diagnostics = errorsToDiagnostics(errors, cursorPosition, doc.length)
    const mergedDiagnostics = mergeCompatibleOverlappingDiagnostics(diagnostics)
    currentResolver!(mergedDiagnostics)
    // make compile controller aware of lint errors via editor:lint event
    const hasLintingError = errors.some(e => e.type !== 'info')
    window.dispatchEvent(
      new CustomEvent('editor:lint', {
        detail: { hasLintingError },
      })
    )
  }
})
|
||||
|
||||
// Start the queued lint run and forward its result to the shared deferred,
// settling every caller that queued while the previous run was active.
// NOTE: deferred.resolve is read inside the .then callback (not eagerly),
// so the resolver in place at resolution time is the one invoked.
const executeQueuedAction = (deferred: Deferred) => {
  runLinter().then(result => deferred.resolve!(result))
  return deferred.promise
}
|
||||
|
||||
// When the current lint finishes: start the queued lint if one is waiting
// (chaining processQueue again after it), otherwise mark the pipeline idle.
const processQueue = () => {
  if (queuedRequest) {
    linterPromise = executeQueuedAction(queuedRequest).then(processQueue)
    queuedRequest = null
  } else {
    linterPromise = null
  }
}
|
||||
|
||||
export const latexLinter = (view: EditorView) => {
|
||||
// always update the view, we use it to filter the results to the current buffer
|
||||
currentView = view
|
||||
// if a linting request isn't already running, start it running
|
||||
if (!linterPromise) {
|
||||
linterPromise = runLinter()
|
||||
linterPromise.then(processQueue)
|
||||
return linterPromise
|
||||
} else {
|
||||
// otherwise create a single deferred promise which we will return to all subsequent requests
|
||||
if (!queuedRequest) {
|
||||
queuedRequest = new Deferred()
|
||||
}
|
||||
return queuedRequest.promise
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,86 @@
|
||||
import { Diagnostic } from '@codemirror/lint'
|
||||
import { Range } from '../../../utils/range'
|
||||
|
||||
const diagnosticsTouchOrOverlap = (d1: Diagnostic, d2: Diagnostic) => {
|
||||
return new Range(d1.from, d1.to).touchesOrIntersects(
|
||||
new Range(d2.from, d2.to)
|
||||
)
|
||||
}
|
||||
|
||||
const mergeDiagnostics = (d1: Diagnostic, d2: Diagnostic) => {
|
||||
const diagnostic: Diagnostic = {
|
||||
from: Math.min(d1.from, d2.from),
|
||||
to: Math.max(d1.to, d2.to),
|
||||
severity: d1.severity,
|
||||
message: d1.message,
|
||||
}
|
||||
if ('source' in d1) {
|
||||
diagnostic.source = d1.source
|
||||
}
|
||||
return diagnostic
|
||||
}
|
||||
|
||||
// Merge diagnostics that share the same message and whose ranges touch or
// overlap. Diagnostics are bucketed by message; each bucket is re-sorted
// and re-merged every time a diagnostic is added to it.
const mergeOverlappingDiagnostics = (diagnostics: Diagnostic[]) => {
  const diagnosticsByMessage = new Map()
  for (const diagnostic of diagnostics) {
    let diagnosticsForMessage = diagnosticsByMessage.get(diagnostic.message)
    if (diagnosticsForMessage) {
      diagnosticsForMessage.push(diagnostic)
      // keep the bucket ordered by start so the single left-to-right pass
      // below finds every mergeable neighbour
      diagnosticsForMessage.sort(
        (d1: Diagnostic, d2: Diagnostic) => d1.from - d2.from
      )
      for (let i = 1; i < diagnosticsForMessage.length; ) {
        const d1 = diagnosticsForMessage[i - 1]
        const d2 = diagnosticsForMessage[i]
        if (diagnosticsTouchOrOverlap(d1, d2)) {
          // Merge second diagnostic into first and remove it. i is not
          // advanced: the grown range may absorb the next entry too.
          diagnosticsForMessage[i - 1] = mergeDiagnostics(d1, d2)
          diagnosticsForMessage.splice(i, 1)
        } else {
          ++i
        }
      }
    } else {
      diagnosticsForMessage = [diagnostic]
      diagnosticsByMessage.set(diagnostic.message, diagnosticsForMessage)
    }
  }
  // flatten the buckets back into one list (message insertion order)
  return Array.from(diagnosticsByMessage.values()).flat()
}
|
||||
|
||||
// Group objects of a specified type by a single property and return an array
|
||||
// of arrays, one array per property value
|
||||
const groupBy = function <T>(arr: T[], prop: keyof T) {
|
||||
const grouped = new Map<T[keyof T], T[]>()
|
||||
for (const item of arr) {
|
||||
const key = item[prop]
|
||||
let group = grouped.get(key)
|
||||
if (!group) {
|
||||
group = [] as T[]
|
||||
grouped.set(key, group)
|
||||
}
|
||||
group.push(item)
|
||||
}
|
||||
return Array.from(grouped.values())
|
||||
}
|
||||
|
||||
export const mergeCompatibleOverlappingDiagnostics = (
|
||||
diagnostics: Diagnostic[]
|
||||
) => {
|
||||
const allMergedDiagnostics = []
|
||||
|
||||
// Partition by diagnostic source (compiler or linter)
|
||||
for (const diagnosticsForSource of groupBy(diagnostics, 'source')) {
|
||||
// Partition into severities
|
||||
const diagnosticsBySeverity = groupBy(diagnosticsForSource, 'severity')
|
||||
|
||||
// Merge overlapping diagnostics for each severity in turn
|
||||
for (const diagnosticsForSeverity of diagnosticsBySeverity) {
|
||||
allMergedDiagnostics.push(
|
||||
...mergeOverlappingDiagnostics(diagnosticsForSeverity)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return allMergedDiagnostics
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
import { latexLinter } from './linter/latex-linter'
|
||||
import { lintSourceConfig } from '../../extensions/annotations'
|
||||
import { createLinter } from '../../extensions/linting'
|
||||
|
||||
// CM6 lint extension for LaTeX documents: runs latexLinter with the shared
// lint display configuration.
export const linting = () => createLinter(latexLinter, lintSourceConfig)
|
||||
@@ -0,0 +1,45 @@
|
||||
import { EditorView } from '@codemirror/view'
|
||||
import { Transaction, Text } from '@codemirror/state'
|
||||
|
||||
// Commands whose presence on a changed line means project metadata
// (packages, labels) may need refreshing
const metadataChangeRe = /\\(documentclass|usepackage|RequirePackage|label)\b/
// key=value options that also affect metadata
const optionChangeRe = /\b(label)=/

// Extension: watch local edits and fire 'editor:metadata-outdated' when a
// changed line (before or after the edit) mentions a metadata command.
export const metadata = () => [
  // trigger metadata reload if edited line contains metadata-related commands
  EditorView.updateListener.of(update => {
    if (update.docChanged) {
      let needsMetadataUpdate = false

      for (const transaction of update.transactions) {
        // ignore remote changes
        if (transaction.annotation(Transaction.remote)) {
          continue
        }

        transaction.changes.iterChangedRanges((fromA, toA, fromB, toB) => {
          // check both the old document (removed text) and the new document
          // (inserted text) around each changed range
          const docs: [Text, number, number][] = [
            [update.startState.doc, fromA, toA],
            [update.state.doc, fromB, toB],
          ]

          for (const [doc, from, to] of docs) {
            const fromLine = doc.lineAt(from).number
            const toLine = doc.lineAt(to).number

            for (const line of doc.iterLines(fromLine, toLine + 1)) {
              if (metadataChangeRe.test(line) || optionChangeRe.test(line)) {
                needsMetadataUpdate = true
                // NOTE: this return only exits the callback for the current
                // changed range; iterChangedRanges keeps iterating
                return
              }
            }
          }
        })

        if (needsMetadataUpdate) {
          window.dispatchEvent(new CustomEvent('editor:metadata-outdated'))
          break
        }
      }
    }
  }),
]
|
||||
@@ -0,0 +1,17 @@
|
||||
import { EditorView } from '@codemirror/view'
|
||||
import { startCompletion } from '@codemirror/autocomplete'
|
||||
import { Transaction } from '@codemirror/state'
|
||||
import { isInEmptyArgumentNodeForAutocomplete } from '../../utils/tree-query'
|
||||
|
||||
// start autocompletion when the cursor enters an empty pair of braces
|
||||
export const openAutocomplete = () => {
|
||||
return EditorView.updateListener.of(update => {
|
||||
if (update.selectionSet || update.docChanged) {
|
||||
if (!update.transactions.some(tr => tr.annotation(Transaction.remote))) {
|
||||
if (isInEmptyArgumentNodeForAutocomplete(update.state)) {
|
||||
startCompletion(update.view)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
import { Prec } from '@codemirror/state'
|
||||
import { keymap } from '@codemirror/view'
|
||||
import { toggleRanges } from '../../commands/ranges'
|
||||
|
||||
export const shortcuts = () => {
|
||||
return Prec.high(
|
||||
keymap.of([
|
||||
{
|
||||
key: 'Ctrl-b',
|
||||
mac: 'Mod-b',
|
||||
preventDefault: true,
|
||||
run: toggleRanges('\\textbf'),
|
||||
},
|
||||
{
|
||||
key: 'Ctrl-i',
|
||||
mac: 'Mod-i',
|
||||
preventDefault: true,
|
||||
run: toggleRanges('\\textit'),
|
||||
},
|
||||
])
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
// 1. Convert from Ace `$1` to CodeMirror numbered placeholder format `${1}` or `#{1}` in snippets.
|
||||
// Note: metadata from the server still uses the old format, so it's not enough to convert all
|
||||
// the bundled data to the new format.
|
||||
// 2. Add a final placeholder at the end of the snippet to allow for
|
||||
// shift-tabbing back from the penultimate placeholder. See #8697.
|
||||
export const prepareSnippetTemplate = (template: string): string => {
|
||||
return template.replace(/\$(\d+)/g, '#{$1}') + '${}'
|
||||
}
|
||||
@@ -0,0 +1,31 @@
|
||||
import { markdown as markdownLanguage } from '@codemirror/lang-markdown'
|
||||
import { shortcuts } from './shortcuts'
|
||||
import { languages } from '../index'
|
||||
import { Strikethrough } from '@lezer/markdown'
|
||||
import {
|
||||
HighlightStyle,
|
||||
LanguageSupport,
|
||||
syntaxHighlighting,
|
||||
} from '@codemirror/language'
|
||||
import { tags } from '@lezer/highlight'
|
||||
|
||||
export const markdown = () => {
|
||||
const { language, support } = markdownLanguage({
|
||||
codeLanguages: languages,
|
||||
extensions: [Strikethrough],
|
||||
})
|
||||
|
||||
return new LanguageSupport(language, [
|
||||
support,
|
||||
shortcuts(),
|
||||
syntaxHighlighting(markdownHighlightStyle),
|
||||
])
|
||||
}
|
||||
|
||||
// Markdown-specific inline styling layered on top of the editor theme
const markdownHighlightStyle = HighlightStyle.define([
  { tag: tags.link, textDecoration: 'underline' },
  { tag: tags.heading, textDecoration: 'underline', fontWeight: 'bold' },
  { tag: tags.emphasis, fontStyle: 'italic' },
  { tag: tags.strong, fontWeight: 'bold' },
  { tag: tags.strikethrough, textDecoration: 'line-through' },
])
|
||||
@@ -0,0 +1,22 @@
|
||||
import { Prec } from '@codemirror/state'
|
||||
import { keymap } from '@codemirror/view'
|
||||
import { wrapRanges } from '../../commands/ranges'
|
||||
|
||||
export const shortcuts = () => {
|
||||
return Prec.high(
|
||||
keymap.of([
|
||||
{
|
||||
key: 'Ctrl-b',
|
||||
mac: 'Mod-b',
|
||||
preventDefault: true,
|
||||
run: wrapRanges('**', '**'),
|
||||
},
|
||||
{
|
||||
key: 'Ctrl-i',
|
||||
mac: 'Mod-i',
|
||||
preventDefault: true,
|
||||
run: wrapRanges('_', '_'),
|
||||
},
|
||||
])
|
||||
)
|
||||
}
|
||||
Reference in New Issue
Block a user