Merge pull request #3934 from James-Yu/unified-latex
Unified-latex based cache
James-Yu committed Jun 19, 2023
2 parents 2dea48a + 7429229 commit f1a847d
Showing 24 changed files with 685 additions and 1,274 deletions.
19 changes: 0 additions & 19 deletions package.json
@@ -1382,13 +1382,6 @@
],
"markdownDescription": "The names of the commands to be shown in the outline/structure views. The commands must be called in the form `\\commandname{arg}`."
},
"latex-workshop.view.outline.fastparse.enabled": {
"scope": "window",
"type": "boolean",
"default": true,
"deprecationMessage": "Deprecated: This config has been renamed to `latex-workshop.intellisense.fastparse.enabled`.",
"markdownDeprecationMessage": "**Deprecated**: This config has been renamed to `#latex-workshop.intellisense.fastparse.enabled#`."
},
"latex-workshop.view.outline.floats.enabled": {
"scope": "window",
"type": "boolean",
@@ -1804,12 +1797,6 @@
"default": 1000,
"markdownDescription": "The minimal time interval between two consecutive runs of `texcount` in milliseconds when `#latex-workshop.texcount.run#` is set to `onSave`."
},
"latex-workshop.intellisense.fastparse.enabled": {
"scope": "window",
"type": "boolean",
"default": true,
"markdownDescription": "Use fast LaTeX parsing algorithm to build outline/structure. This is done by inherently removing texts and comments before building AST. Enabling this will not tamper the document, but may result in incomplete outline/structure."
},
"latex-workshop.intellisense.update.aggressive.enabled": {
"scope": "window",
"type": "boolean",
@@ -1975,12 +1962,6 @@
],
"markdownDescription": "The name of LaTeX commands that indicates a label definition. The command must accept one mandatory argument of the label reference string, e.g, \\linelabel{ref-str}."
},
"latex-workshop.intellisense.label.keyval": {
"scope": "window",
"type": "boolean",
"default": true,
"markdownDescription": "Scan for labels defined as `label={some tex}` to add to the reference intellisense menu. The braces are mandatory."
},
"latex-workshop.intellisense.unimathsymbols.enabled": {
"scope": "window",
"type": "boolean",
148 changes: 74 additions & 74 deletions src/components/cacher.ts
@@ -1,7 +1,6 @@
import * as vscode from 'vscode'
import * as fs from 'fs'
import * as path from 'path'
import { latexParser } from 'latex-utensils'
import type * as Ast from '@unified-latex/unified-latex-types'
import * as lw from '../lw'
import * as eventbus from './eventbus'
@@ -20,9 +19,13 @@ import { performance } from 'perf_hooks'

const logger = getLogger('Cacher')

interface Cache {
export interface Cache {
/** The raw file path of this Cache. */
filePath: string,
/** Cached content of file. Dirty if opened in vscode, disk otherwise */
content: string | undefined,
content: string,
/** Cached trimmed content of `content`. */
contentTrimmed: string,
/** Completion items */
elements: {
/** \ref{} items */
@@ -50,8 +53,6 @@ interface Cache {
/** A dictionary of external documents provided by `\externaldocument` of
* `xr` package. The value is its prefix `\externaldocument[prefix]{*}` */
external: {[filePath: string]: string},
/** The AST of this file, generated by latex-utensils */
luAst?: latexParser.LatexAst,
/** The AST of this file, generated by unified-latex */
ast?: Ast.Root
}
@@ -101,6 +102,22 @@ export class Cacher {
return this.promises[filePath]
}

async wait(filePath: string, seconds = 2) {
let waited = 0
while (!this.promise(filePath) && !this.has(filePath)) {
// The file has just been opened in vscode and has not been cached yet; wait a bit.
await new Promise(resolve => setTimeout(resolve, 100))
waited++
if (waited >= seconds * 10) {
// Waited the full timeout (default two seconds) and caching has still not started; force a refresh.
logger.log(`Error loading cache: ${filePath} . Forcing.`)
await this.refreshCache(filePath)
break
}
}
return this.promise(filePath)
}

get allPromises() {
return Object.values(this.promises)
}
@@ -126,19 +143,22 @@
}
logger.log(`Caching ${filePath} .`)
this.caching++
const content = lw.lwfs.readFileSyncGracefully(filePath)
this.caches[filePath] = {content, elements: {}, children: [], bibfiles: new Set(), external: {}}
if (content === undefined) {
logger.log(`Cannot read ${filePath} .`)
return
}
const contentTrimmed = utils.stripCommentsAndVerbatim(content)
const content = lw.lwfs.readFileSyncGracefully(filePath) ?? ''
const cache: Cache = {
filePath,
content,
contentTrimmed: utils.stripCommentsAndVerbatim(content),
elements: {},
children: [],
bibfiles: new Set(),
external: {}}
this.caches[filePath] = cache
rootPath = rootPath || lw.manager.rootFile
this.updateChildren(filePath, rootPath, contentTrimmed)
this.updateChildren(cache, rootPath)

this.promises[filePath] = this.updateAST(filePath, content).then(() => {
this.updateElements(filePath, content, contentTrimmed)
this.updateBibfiles(filePath, contentTrimmed)
this.promises[filePath] = this.updateAST(cache).then(() => {
this.updateElements(cache)
this.updateBibfiles(cache)
}).finally(() => {
logger.log(`Cached ${filePath} .`)
this.caching--
@@ -153,40 +173,29 @@
return this.promises[filePath]
}

private async updateAST(filePath: string, content: string) {
const configuration = vscode.workspace.getConfiguration('latex-workshop')
const fastparse = configuration.get('intellisense.fastparse.enabled') as boolean
logger.log('Parse LaTeX AST ' + (fastparse ? 'with fast-parse: ' : ': ') + filePath + ' .')

let start = performance.now()
const strippedText = utils.stripText(content)
const ast = await parser.parseLatex(fastparse ? strippedText : content)
logger.log(`Parsed LaTeX AST with LU in ${(performance.now() - start).toFixed(2)} ms: ${filePath} .`)

const cache = this.get(filePath)
if (cache) {
start = performance.now()
cache.ast = parser.unifiedParse(content)
logger.log(`Parsed LaTeX AST in ${(performance.now() - start).toFixed(2)} ms: ${filePath} .`)
}
if (ast && cache) {
cache.luAst = ast
} else {
logger.log(ast === undefined ? 'Failed parsing LaTeX AST.' : `Cannot get cache for ${filePath} .`)
}
private async updateAST(cache: Cache): Promise<void> {
logger.log(`Parse LaTeX AST: ${cache.filePath} .`)
const start = performance.now()
return new Promise((resolve, _) => {
setTimeout(() => {
cache.ast = parser.unifiedParse(cache.content)
logger.log(`Parsed LaTeX AST in ${(performance.now() - start).toFixed(2)} ms: ${cache.filePath} .`)
resolve()
}, 0)
})
}

private updateChildren(filePath: string, rootPath: string | undefined, contentTrimmed: string) {
rootPath = rootPath || filePath
this.updateChildrenInput(filePath, rootPath, contentTrimmed)
this.updateChildrenXr(filePath, rootPath, contentTrimmed)
logger.log(`Updated inputs of ${filePath} .`)
private updateChildren(cache: Cache, rootPath: string | undefined) {
rootPath = rootPath || cache.filePath
this.updateChildrenInput(cache, rootPath)
this.updateChildrenXr(cache, rootPath)
logger.log(`Updated inputs of ${cache.filePath} .`)
}

private updateChildrenInput(filePath: string, rootPath: string , contentTrimmed: string) {
private updateChildrenInput(cache: Cache, rootPath: string) {
const inputFileRegExp = new InputFileRegExp()
while (true) {
const result = inputFileRegExp.exec(contentTrimmed, filePath, rootPath)
const result = inputFileRegExp.exec(cache.contentTrimmed, cache.filePath, rootPath)
if (!result) {
break
}
@@ -199,7 +208,7 @@
index: result.match.index,
filePath: result.path
})
logger.log(`Input ${result.path} from ${filePath} .`)
logger.log(`Input ${result.path} from ${cache.filePath} .`)

if (this.src.has(result.path)) {
continue
@@ -209,24 +218,24 @@
}
}

private updateChildrenXr(filePath: string, rootPath: string , contentTrimmed: string) {
private updateChildrenXr(cache: Cache, rootPath: string) {
const externalDocRegExp = /\\externaldocument(?:\[(.*?)\])?\{(.*?)\}/g
while (true) {
const result = externalDocRegExp.exec(contentTrimmed)
const result = externalDocRegExp.exec(cache.contentTrimmed)
if (!result) {
break
}

const texDirs = vscode.workspace.getConfiguration('latex-workshop').get('latex.texDirs') as string[]
const externalPath = utils.resolveFile([path.dirname(filePath), path.dirname(rootPath), ...texDirs], result[2])
const externalPath = utils.resolveFile([path.dirname(cache.filePath), path.dirname(rootPath), ...texDirs], result[2])
if (!externalPath || !fs.existsSync(externalPath) || path.relative(externalPath, rootPath) === '') {
logger.log(`Failed resolving external ${result[2]} . Tried ${externalPath} ` +
(externalPath && path.relative(externalPath, rootPath) === '' ? ', which is root.' : '.'))
continue
}

this.caches[rootPath].external[externalPath] = result[1] || ''
logger.log(`External document ${externalPath} from ${filePath} .` +
logger.log(`External document ${externalPath} from ${cache.filePath} .` +
(result[1] ? ` Prefix is ${result[1]}`: ''))

if (this.src.has(externalPath)) {
@@ -237,50 +246,41 @@
}
}

private updateElements(filePath: string, content: string, contentTrimmed: string) {
lw.completer.citation.update(filePath, content)
const cache = this.get(filePath)
if (cache?.luAst) {
const nodes = cache.luAst.content
const lines = content.split('\n')
lw.completer.reference.update(filePath, nodes, lines)
lw.completer.glossary.update(filePath, nodes)
lw.completer.environment.update(filePath, nodes, lines)
lw.completer.command.update(filePath, nodes)
} else {
logger.log(`Use RegExp to update elements of ${filePath} .`)
lw.completer.reference.update(filePath, undefined, undefined, contentTrimmed)
lw.completer.glossary.update(filePath, undefined, contentTrimmed)
lw.completer.environment.update(filePath, undefined, undefined, contentTrimmed)
lw.completer.command.update(filePath, undefined, contentTrimmed)
}
lw.duplicateLabels.run(filePath)
logger.log(`Updated elements of ${filePath} .`)
private updateElements(cache: Cache) {
lw.completer.citation.update(cache.filePath, cache.content)
// Package parsing must come before command and environment parsing.
lw.completer.package.parse(cache)
lw.completer.reference.parse(cache)
lw.completer.glossary.parse(cache)
lw.completer.environment.parse(cache)
lw.completer.command.parse(cache)
lw.duplicateLabels.run(cache.filePath)
logger.log(`Updated elements of ${cache.filePath} .`)
}

private updateBibfiles(filePath: string, contentTrimmed: string) {
private updateBibfiles(cache: Cache) {
const bibReg = /(?:\\(?:bibliography|addbibresource)(?:\[[^[\]{}]*\])?){(.+?)}|(?:\\putbib)\[(.+?)\]/g
while (true) {
const result = bibReg.exec(contentTrimmed)
const result = bibReg.exec(cache.contentTrimmed)
if (!result) {
break
}

const bibs = (result[1] ? result[1] : result[2]).split(',').map(bib => bib.trim())

for (const bib of bibs) {
const bibPath = PathUtils.resolveBibPath(bib, path.dirname(filePath))
const bibPath = PathUtils.resolveBibPath(bib, path.dirname(cache.filePath))
if (bibPath === undefined) {
continue
}
this.caches[filePath].bibfiles.add(bibPath)
logger.log(`Bib ${bibPath} from ${filePath} .`)
cache.bibfiles.add(bibPath)
logger.log(`Bib ${bibPath} from ${cache.filePath} .`)
if (!this.bib.has(bibPath)) {
this.bib.add(bibPath)
}
}
}
logger.log(`Updated bibs of ${filePath} .`)
logger.log(`Updated bibs of ${cache.filePath} .`)
}

/**
