First commit
This commit is contained in:
commit
42df9465cf
13
.babelrc
Normal file
13
.babelrc
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
{
|
||||||
|
"presets": [
|
||||||
|
[
|
||||||
|
"@babel/preset-env",
|
||||||
|
{
|
||||||
|
"targets": {
|
||||||
|
"node": "current"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"@babel/preset-typescript",
|
||||||
|
]
|
||||||
|
}
|
5
.editorconfig
Normal file
5
.editorconfig
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
root = true
|
||||||
|
|
||||||
|
[*]
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 2
|
32
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
32
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
---
|
||||||
|
name: Bug report
|
||||||
|
about: Create a report to help us improve
|
||||||
|
title: "[BUG] "
|
||||||
|
labels: ''
|
||||||
|
assignees: ''
|
||||||
|
---
|
||||||
|
|
||||||
|
<!-- PLEASE FILL THIS TEMPLATE -->
|
||||||
|
<!-- If you don't fill it, I'll ask you to fill it before looking at your issue -->
|
||||||
|
|
||||||
|
**Problem description:**
|
||||||
|
|
||||||
|
<!-- Describe your problem in details. -->
|
||||||
|
<!-- If applicable, you can copy-paste the console logs (ctrl+shift+i in Obsidian) -->
|
||||||
|
<!-- and attach screenshots -->
|
||||||
|
|
||||||
|
**Your environment:**
|
||||||
|
|
||||||
|
<!-- Please, if possible and before filing an issue, -->
|
||||||
|
<!-- make sure that you have the latest available version of Omnisearch. -->
|
||||||
|
|
||||||
|
- Omnisearch version:
|
||||||
|
- Obsidian version:
|
||||||
|
- Operating system:
|
||||||
|
- Number of indexed documents in your vault (approx.):
|
||||||
|
|
||||||
|
**Things to try:**
|
||||||
|
|
||||||
|
- Does the problem occur when Omnisearch is the only active community plugin:
|
||||||
|
- Does the problem occur when you _don't_ index PDFs, images, or other non-notes files:
|
||||||
|
- Does the problem occur after a cache reset:
|
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
---
|
||||||
|
name: Feature request
|
||||||
|
about: Suggest an idea for this project
|
||||||
|
title: "[Feature request]"
|
||||||
|
labels: ''
|
||||||
|
assignees: ''
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Is your feature request related to a problem? Please describe.**
|
||||||
|
<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -->
|
||||||
|
|
||||||
|
**Describe the solution you'd like**
|
||||||
|
<!-- A clear and concise description of what you want to happen. -->
|
||||||
|
|
||||||
|
**Describe alternatives you've considered**
|
||||||
|
<!-- A clear and concise description of any alternative solutions or features you've considered. -->
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
<!-- Add any other context or screenshots about the feature request here. -->
|
110
.github/workflows/release.yml
vendored
Normal file
110
.github/workflows/release.yml
vendored
Normal file
|
@ -0,0 +1,110 @@
|
||||||
|
name: Release Obsidian plugin
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- "*"
|
||||||
|
|
||||||
|
env:
|
||||||
|
PLUGIN_NAME: omnisearch
|
||||||
|
DIST_FOLDER: ./dist
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- uses: pnpm/action-setup@v4
|
||||||
|
with:
|
||||||
|
run_install: true
|
||||||
|
|
||||||
|
- name: Use Node.js
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
cache: 'pnpm'
|
||||||
|
node-version: "20.x"
|
||||||
|
|
||||||
|
- name: Build
|
||||||
|
id: build
|
||||||
|
run: |
|
||||||
|
pnpm install
|
||||||
|
pnpm run build
|
||||||
|
mkdir ${{ env.PLUGIN_NAME }}
|
||||||
|
cp ${{ env.DIST_FOLDER }}/* ${{ env.PLUGIN_NAME }}
|
||||||
|
zip -r ${{ env.PLUGIN_NAME }}.zip ${{ env.PLUGIN_NAME }}
|
||||||
|
ls
|
||||||
|
echo "::set-output name=tag_name::$(git tag --sort version:refname | tail -n 1)"
|
||||||
|
|
||||||
|
- name: Generate a changelog
|
||||||
|
uses: orhun/git-cliff-action@v3
|
||||||
|
id: git-cliff
|
||||||
|
with:
|
||||||
|
config: cliff.toml
|
||||||
|
args: -vv --latest --strip header
|
||||||
|
env:
|
||||||
|
GITHUB_REPO: ${{ github.repository }}
|
||||||
|
|
||||||
|
- name: Print the changelog
|
||||||
|
run: cat "${{ steps.git-cliff.outputs.changelog }}"
|
||||||
|
|
||||||
|
- name: Create Release
|
||||||
|
id: create_release
|
||||||
|
uses: actions/create-release@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
VERSION: ${{ github.ref }}
|
||||||
|
with:
|
||||||
|
tag_name: ${{ github.ref }}
|
||||||
|
release_name: ${{ github.ref }}
|
||||||
|
body: ${{ steps.git-cliff.outputs.content }}
|
||||||
|
draft: true
|
||||||
|
prerelease: false
|
||||||
|
|
||||||
|
- name: Upload zip file
|
||||||
|
id: upload-zip
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||||
|
asset_path: ./${{ env.PLUGIN_NAME }}.zip
|
||||||
|
asset_name: ${{ env.PLUGIN_NAME }}-${{ steps.build.outputs.tag_name }}.zip
|
||||||
|
asset_content_type: application/zip
|
||||||
|
|
||||||
|
- name: Upload main.js
|
||||||
|
id: upload-main
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||||
|
asset_path: ${{ env.DIST_FOLDER }}/main.js
|
||||||
|
asset_name: main.js
|
||||||
|
asset_content_type: text/javascript
|
||||||
|
|
||||||
|
- name: Upload manifest.json
|
||||||
|
id: upload-manifest
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||||
|
asset_path: ./manifest.json
|
||||||
|
asset_name: manifest.json
|
||||||
|
asset_content_type: application/json
|
||||||
|
|
||||||
|
- name: Upload styles.css
|
||||||
|
id: upload-css
|
||||||
|
uses: actions/upload-release-asset@v1
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||||
|
asset_path: ${{ env.DIST_FOLDER }}/styles.css
|
||||||
|
asset_name: styles.css
|
||||||
|
asset_content_type: text/css
|
||||||
|
|
24
.gitignore
vendored
Normal file
24
.gitignore
vendored
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
# vscode
|
||||||
|
.vscode
|
||||||
|
|
||||||
|
# Intellij
|
||||||
|
*.iml
|
||||||
|
.idea
|
||||||
|
|
||||||
|
# npm
|
||||||
|
node_modules
|
||||||
|
|
||||||
|
# Exclude sourcemaps
|
||||||
|
*.map
|
||||||
|
|
||||||
|
# obsidian
|
||||||
|
data.json
|
||||||
|
|
||||||
|
# Exclude macOS Finder (System Explorer) View States
|
||||||
|
.DS_Store
|
||||||
|
|
||||||
|
dist
|
||||||
|
.pnpm-debug.log
|
||||||
|
coverage
|
||||||
|
package-lock.json
|
||||||
|
Doc Omnisearch/.obsidian
|
13
.prettierrc.js
Normal file
13
.prettierrc.js
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
// prettier.config.js or .prettierrc.js
|
||||||
|
module.exports = {
|
||||||
|
plugins: ['prettier-plugin-svelte'],
|
||||||
|
trailingComma: 'es5',
|
||||||
|
tabWidth: 2,
|
||||||
|
semi: false,
|
||||||
|
singleQuote: true,
|
||||||
|
arrowParens: 'avoid',
|
||||||
|
bracketSameLine: true,
|
||||||
|
svelteBracketNewLine: false,
|
||||||
|
svelteAllowShorthand: true,
|
||||||
|
svelteIndentScriptAndStyle: true,
|
||||||
|
}
|
243
CHANGELOG.md
Normal file
243
CHANGELOG.md
Normal file
|
@ -0,0 +1,243 @@
|
||||||
|
# Omnisearch Changelog
|
||||||
|
|
||||||
|
This changelog is not exhaustive.
|
||||||
|
|
||||||
|
## 1.27.x
|
||||||
|
|
||||||
|
- Updated Svelte from v3 to v5
|
||||||
|
- Highlighting improvements
|
||||||
|
|
||||||
|
## 1.26.x
|
||||||
|
|
||||||
|
- Allow `# headings` as display titles in search results
|
||||||
|
- Added an experimental recency boost
|
||||||
|
- Added lazy loading for the vault modal items
|
||||||
|
|
||||||
|
## 1.25.x
|
||||||
|
|
||||||
|
- Added basic support for embed references in Vault Search results
|
||||||
|
- Added support for [Iconize](https://github.com/FlorianWoelki/obsidian-iconize)
|
||||||
|
- Weights are now 1-10 (instead of 1-5)
|
||||||
|
- Small performance improvements
|
||||||
|
|
||||||
|
## 1.24.x
|
||||||
|
|
||||||
|
- Added support for [AI Image Analyzer](https://github.com/Swaggeroo/obsidian-ai-image-analyzer)
|
||||||
|
|
||||||
|
## 1.23.x
|
||||||
|
|
||||||
|
- Updated Chinese tokenizer
|
||||||
|
- Added user-defined boosted fields
|
||||||
|
- No more freezes when loading large caches (hopefully)
|
||||||
|
- Large refactoring to properly clean up several older warnings
|
||||||
|
|
||||||
|
## 1.22.x
|
||||||
|
|
||||||
|
- Improved highlighting
|
||||||
|
|
||||||
|
## 1.21.x
|
||||||
|
|
||||||
|
- Added support for .docx and .xlsx
|
||||||
|
|
||||||
|
## 1.20.x
|
||||||
|
|
||||||
|
- Refactored indexing tokenization process to correctly take diacritics into account
|
||||||
|
- Added highlighting in the note's path
|
||||||
|
- Improved the selection of the chosen excerpt in the results list
|
||||||
|
|
||||||
|
## 1.19.x
|
||||||
|
|
||||||
|
- Various bugfixes and improvements
|
||||||
|
|
||||||
|
## 1.18.x
|
||||||
|
|
||||||
|
- Added a localhost server to use Omnisearch's API from outside Obsidian
|
||||||
|
|
||||||
|
## 1.17.x
|
||||||
|
|
||||||
|
- Added a shortcut to open files without closing Omnisearch
|
||||||
|
- Prefill the search field with selected text
|
||||||
|
- Improved highlighting
|
||||||
|
|
||||||
|
## 1.16.x
|
||||||
|
|
||||||
|
- Various indexing/tokenization improvements
|
||||||
|
|
||||||
|
## 1.15.x
|
||||||
|
|
||||||
|
- Added support of webp images
|
||||||
|
- Configurable fuzziness
|
||||||
|
- Added support for DataLoom plugin files
|
||||||
|
- Unsupported files are now indexed by their path
|
||||||
|
- Unmarked tags are now slightly boosted
|
||||||
|
|
||||||
|
## 1.14.x
|
||||||
|
|
||||||
|
- Added a `path:` option
|
||||||
|
- Bugfixes
|
||||||
|
|
||||||
|
## 1.13.x
|
||||||
|
|
||||||
|
- CamelCaseWords are now indexed as 3 words
|
||||||
|
- Reduced search freezes in some cases
|
||||||
|
|
||||||
|
## 1.12.x
|
||||||
|
|
||||||
|
- You can filter files by their extension
|
||||||
|
- Refreshed UI
|
||||||
|
- New API functions
|
||||||
|
- Fixed some tokenization issues
|
||||||
|
|
||||||
|
## 1.10.x - 1.11.x
|
||||||
|
|
||||||
|
- Added support for Text Extractor; Omnisearch no longer extracts text itself
|
||||||
|
- Added canvas indexing
|
||||||
|
- Improved tags indexing
|
||||||
|
|
||||||
|
## 1.9.x
|
||||||
|
|
||||||
|
- PDFs are no longer indexed on mobile
|
||||||
|
- Performance improvements
|
||||||
|
- Various bugfixes
|
||||||
|
|
||||||
|
## 1.8.x
|
||||||
|
|
||||||
|
- Added OCR for images
|
||||||
|
- OCR and PDF indexing are now restricted to desktop. They either don't work or consume too much resources during indexing of big vaults. Too many headaches.
|
||||||
|
- Notes caching is deactivated on iOS because of crashes: memory usage too high during (de)serializing.
|
||||||
|
- Added an URL scheme for integration with external tools: `obsidian://omnisearch?query=foo bar`
|
||||||
|
|
||||||
|
## 1.7.x
|
||||||
|
|
||||||
|
### New
|
||||||
|
|
||||||
|
- PDF Indexing https://github.com/scambier/obsidian-omnisearch/issues/58
|
||||||
|
|
||||||
|
### Improved
|
||||||
|
|
||||||
|
- Code refactor to better scale and handle PDFs as smoothly as possible
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Search history https://github.com/scambier/obsidian-omnisearch/issues/104
|
||||||
|
- Text in search input was not always correctly selected https://github.com/scambier/obsidian-omnisearch/issues/105
|
||||||
|
- Padding issue https://github.com/scambier/obsidian-omnisearch/issues/113
|
||||||
|
|
||||||
|
### Removed
|
||||||
|
|
||||||
|
- Caching data https://github.com/scambier/obsidian-omnisearch/issues/92#issuecomment-1287647725
|
||||||
|
|
||||||
|
|
||||||
|
## 1.6.x
|
||||||
|
|
||||||
|
### New
|
||||||
|
|
||||||
|
- Omnisearch can now index other plaintext files ~~and PDFs~~ https://github.com/scambier/obsidian-omnisearch/issues/58
|
||||||
|
- Search history, navigable with <code>alt+up/down</code> https://github.com/scambier/obsidian-omnisearch/issues/90
|
||||||
|
- Added a setting to toggle the visibility of the note excerpt in results https://github.com/scambier/obsidian-omnisearch/issues/70
|
||||||
|
|
||||||
|
### Improved
|
||||||
|
|
||||||
|
- You can now create a new note in a new pane https://github.com/scambier/obsidian-omnisearch/issues/87
|
||||||
|
- Added a setting to show a "create note" button https://github.com/scambier/obsidian-omnisearch/issues/96
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Fixed layout issues https://github.com/scambier/obsidian-omnisearch/issues/97
|
||||||
|
|
||||||
|
## 1.5.x
|
||||||
|
|
||||||
|
### New
|
||||||
|
|
||||||
|
* Added a toggleable sidebar button to open Omnisearch: https://github.com/scambier/obsidian-omnisearch/issues/60
|
||||||
|
* Added a cache-clearing mechanism in case of corruption: https://github.com/scambier/obsidian-omnisearch/issues/83
|
||||||
|
|
||||||
|
### Improved
|
||||||
|
|
||||||
|
* Notes created by Omnisearch now honour the default note location https://github.com/scambier/obsidian-omnisearch/pull/81
|
||||||
|
* Ctrl+click now opens the note in a new pane https://github.com/scambier/obsidian-omnisearch/issues/61
|
||||||
|
* Improved highlighting https://github.com/scambier/obsidian-omnisearch/issues/85
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
* Fixed some edge cases when opening an already open/pinned note https://github.com/scambier/obsidian-omnisearch/issues/51 https://github.com/scambier/obsidian-omnisearch/issues/80
|
||||||
|
* Fixed nested tags searching https://github.com/scambier/obsidian-omnisearch/issues/79
|
||||||
|
* Fixed a silent crash when clicking on In-File search results https://github.com/scambier/obsidian-omnisearch/issues/84
|
||||||
|
|
||||||
|
## 1.4.x
|
||||||
|
|
||||||
|
### New
|
||||||
|
|
||||||
|
- Opt-in support for Vim navigation keys: https://github.com/scambier/obsidian-omnisearch/issues/26
|
||||||
|
- Opt-in display of "short form" links: https://github.com/scambier/obsidian-omnisearch/issues/59
|
||||||
|
- Opt-in search index serialization, for faster loading times: https://github.com/scambier/obsidian-omnisearch/pull/64 by @mnaoumov
|
||||||
|
- Opt-out: diacritics can now be ignored
|
||||||
|
- Added support for `#tag` searches: https://github.com/scambier/obsidian-omnisearch/issues/48
|
||||||
|
- Added a basic public API for integration with other plugins: https://github.com/scambier/obsidian-omnisearch/issues/22 https://github.com/scambier/obsidian-omnisearch/issues/69
|
||||||
|
- Use `alt+enter` to inject a link to the currently selected search result item: https://github.com/scambier/obsidian-omnisearch/issues/32
|
||||||
|
|
||||||
|
|
||||||
|
### Improved
|
||||||
|
|
||||||
|
- You can now switch between "Vault" and "In-File" modals with `tab`
|
||||||
|
- Search index updates are now done only when Omnisearch is invoked: https://github.com/scambier/obsidian-omnisearch/issues/57
|
||||||
|
- New files are now created empty: https://github.com/scambier/obsidian-omnisearch/issues/77
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Opening a pinned note would open it a second time: https://github.com/scambier/obsidian-omnisearch/issues/51
|
||||||
|
- Fixed an issue that would index "non-existing notes" multiple times: https://github.com/scambier/obsidian-omnisearch/issues/68
|
||||||
|
- Fixed a visual bug for Obsidian 0.15.3: https://github.com/scambier/obsidian-omnisearch/issues/76
|
||||||
|
- Fixed the diacritics normalization of the note's title: https://github.com/scambier/obsidian-omnisearch/issues/72
|
||||||
|
|
||||||
|
## 1.3.x
|
||||||
|
|
||||||
|
### New
|
||||||
|
|
||||||
|
* Chinese support by @aidenlx in https://github.com/scambier/obsidian-omnisearch/pull/37
|
||||||
|
* You need to install https://github.com/aidenlx/cm-chs-patch to enable this feature
|
||||||
|
* Settings page https://github.com/scambier/obsidian-omnisearch/issues/41
|
||||||
|
* Do not show indexing Notice by default by @chrisgrieser in https://github.com/scambier/obsidian-omnisearch/pull/46
|
||||||
|
* Include notes that don't exist https://github.com/scambier/obsidian-omnisearch/issues/14
|
||||||
|
|
||||||
|
### Improved
|
||||||
|
|
||||||
|
* Better accessibility https://github.com/scambier/obsidian-omnisearch/issues/50
|
||||||
|
* Note aliases are now scored as high as the filename in search results https://github.com/scambier/obsidian-omnisearch/issues/34
|
||||||
|
* By default, reindexing is now done when the app is out of focus, and not after each save https://github.com/scambier/obsidian-omnisearch/issues/57
|
||||||
|
* On mobile, indexing is only done at startup
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
* Showing an error when a note can't be created https://github.com/scambier/obsidian-omnisearch/issues/52
|
||||||
|
|
||||||
|
|
||||||
|
## 1.2.x
|
||||||
|
|
||||||
|
### New
|
||||||
|
* #42 Files that are present in Obsidian's "Excluded Files" list are downranked by a factor of 3 (_desktop only_)
|
||||||
|
|
||||||
|
## 1.1.1
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
* Fixed a crash when no results were returned
|
||||||
|
|
||||||
|
## 1.1.0
|
||||||
|
|
||||||
|
### New
|
||||||
|
* #25 Search filters: expressions in quotes and exclusions
|
||||||
|
* Added support for beta versions with [BRAT](https://github.com/TfTHacker/obsidian42-brat)
|
||||||
|
|
||||||
|
This works as a "post-search" filter and does not allow for partial words searches (see #35)
|
||||||
|
|
||||||
|
### Fixes
|
||||||
|
* #39 Fixed key events not correctly prevented in the search input
|
||||||
|
|
||||||
|
**Full Changelog**: https://github.com/scambier/obsidian-omnisearch/compare/1.0.1...1.1.0
|
||||||
|
|
||||||
|
## 1.0.1
|
||||||
|
|
||||||
|
## 1.0.0
|
||||||
|
|
||||||
|
* First non-beta release
|
||||||
|
* Includes Vault search and In-File search
|
47
CONTRIBUTING.md
Normal file
47
CONTRIBUTING.md
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
# Contributing to Omnisearch
|
||||||
|
|
||||||
|
_<small>This document is a Work In Progress.</small>_
|
||||||
|
|
||||||
|
Thank you for wanting to make Omnisearch an even better plugin :)
|
||||||
|
|
||||||
|
Please read this document before beginning work on a Pull Request.
|
||||||
|
|
||||||
|
## Preface
|
||||||
|
|
||||||
|
- Omnisearch is a personal hobby project. I'm happy to discuss about your ideas and additions, but ultimately it is my code to grow and maintain.
|
||||||
|
- ❗ Always file an issue/feature request before working on a PR, to make sure we're aligned and no-one is making useless work.
|
||||||
|
|
||||||
|
## "Good First Issue"
|
||||||
|
|
||||||
|
Are you a beginner, looking for a small open source contribution? Look at the "[good first issues](https://github.com/scambier/obsidian-omnisearch/labels/good%20first%20issue)". Those issues have a limited scope, don't require intricate knowledge of the code, and are easy enough to locate, fix, and test.
|
||||||
|
|
||||||
|
If you wish to work on one of these issues, leave a comment and I'll assign it to you and give you some pointers.
|
||||||
|
|
||||||
|
## Code guidelines
|
||||||
|
|
||||||
|
- ❗ By default, start your fork from the `develop` branch. If the `develop` branch is behind `master`, then use `master`. When in doubt, ask :)
|
||||||
|
- Don't add npm dependencies if you can avoid it. If a new dependency is unavoidable, be mindful of its size, freshness and added value.
|
||||||
|
- Use Svelte for all UI needs.
|
||||||
|
- Try to not shoehorn your code into existing functions or components.
|
||||||
|
- Simple is better. OOP is not inevitable. Simple functions often work as well, if not better.
|
||||||
|
- If you must use OOP, avoid inheritance as much as possible, no one likes digging several layers of abstraction.
|
||||||
|
- Comment the code. What, why, how, just make your intent clear.
|
||||||
|
|
||||||
|
## Philosphy
|
||||||
|
|
||||||
|
Always respect those UI & UX points:
|
||||||
|
- The core feature of Omnisearch is its "smartness".
|
||||||
|
- The simplest queries must bring relevant results.
|
||||||
|
- The search interface is a means to an end.
|
||||||
|
- The less user interactions, the better.
|
||||||
|
- All settings must have sane defaults.
|
||||||
|
- The UI must not block / show visible lag.
|
||||||
|
- Keyboard navigation first
|
||||||
|
- If you're adding a feature, make it toggleable (if desirable).
|
||||||
|
- The results must always come fast by default.
|
||||||
|
|
||||||
|
## Style guidelines
|
||||||
|
|
||||||
|
- .ts files must be formatted with "Prettier ESLint"
|
||||||
|
- .svelte files must be formatted with "Svelte for VS Code"
|
||||||
|
- All CSS code **must** go into styles.css, and all classes should be properly named for easy customization. Do **not** use `<style>` tags in Svelte components
|
13
Doc Omnisearch/Customization.md
Normal file
13
Doc Omnisearch/Customization.md
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
There are several CSS classes you can use to customize the appearance of Omnisearch.
|
||||||
|
|
||||||
|
```css
|
||||||
|
.omnisearch-modal
|
||||||
|
.omnisearch-result
|
||||||
|
.omnisearch-result__title
|
||||||
|
.omnisearch-result__counter
|
||||||
|
.omnisearch-result__body
|
||||||
|
.omnisearch-highlight
|
||||||
|
.omnisearch-input-container
|
||||||
|
.omnisearch-input-field
|
||||||
|
```
|
||||||
|
*This is list is not exhaustive, some classes might be added as Omnisearch is evolving.*
|
37
Doc Omnisearch/How to use Omnisearch.md
Normal file
37
Doc Omnisearch/How to use Omnisearch.md
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
Omnisearch is designed to be as unobtrusive as possible. Install it, let it index your vault, and it works.
|
||||||
|
|
||||||
|
It is quite different than the core search plugin, and is not intended to replace it. ==**It's a tool to help you find your notes as fast as possible**==. If you're well organized, know what is in your vault and where each note is located, it won't probably be very useful to you.
|
||||||
|
|
||||||
|
But if your vault looks like a messy desk full of scattered papers and post-it notes, Omnisearch might be the tool you need.
|
||||||
|
|
||||||
|
## Omnisearch contexts
|
||||||
|
|
||||||
|
### Vault Search
|
||||||
|
|
||||||
|
Omnisearch's core feature, accessible with the Command Palette "**_Omnisearch: Vault search_**". This modal searches through your vault and returns the most relevant documents. That's all you need to find a note.
|
||||||
|
|
||||||
|
If you want to list all the search matches of a single note, you can do so by using `tab` to open the **In-File Search**.
|
||||||
|
|
||||||
|
### In-File Search
|
||||||
|
|
||||||
|
Also accessible through the Command Palette "**_Omnisearch: In-file search_**". This modal searches through the active note's content and lists the matching results. Press enter to automatically scroll to the right place.
|
||||||
|
|
||||||
|
Note that this modal is unavailable if your active file is not a Markdown document.
|
||||||
|
|
||||||
|
## Efficiently looking for documents
|
||||||
|
|
||||||
|
Omnisearch maintains an index of words from your notes. When you type in a query, it compares the words from your query to the words in its index, and returns the most relevant notes.
|
||||||
|
|
||||||
|
> [!IMPORTANT] The best queries are the most spontaneous
|
||||||
|
> A good query should contain the "important" words of the note you're trying to find. Important words are the first words that come to your mind when you think about a note.
|
||||||
|
|
||||||
|
They're the ones in the filename, directory, in the titles, that are often repeated throughout the note, or quite unique to it.
|
||||||
|
|
||||||
|
While Omnisearch does not have the advanced features of the core search, there are a few options you can use to filter results.
|
||||||
|
|
||||||
|
## Advanced tips
|
||||||
|
|
||||||
|
- Use `path:"<somepath>"` to restrict your results to corresponding paths
|
||||||
|
- Use `ext:"png jpg"` or `ext:png`, or a plain `.png` to specify the filetype(s)
|
||||||
|
- Use `"exact expressions"` in quotes to further filter the results returned by the query
|
||||||
|
- Use `-exclusions` to exclude notes containing certain words
|
3
Doc Omnisearch/Images, PDFs, and non-text documents.md
Normal file
3
Doc Omnisearch/Images, PDFs, and non-text documents.md
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
Omnisearch can index PDFs, images, and some Microsoft Office documents with the help of [Text Extractor](https://github.com/scambier/obsidian-text-extractor). You can install this plugin, and enable the relevant settings.
|
||||||
|
|
||||||
|
Keep in mind that extracting text from those files is not guaranteed. At the time of writing, many PDFs don't work with Text Extractor.
|
30
Doc Omnisearch/Index.md
Normal file
30
Doc Omnisearch/Index.md
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
**Omnisearch** is a search engine that "_just works_". It always instantly shows you the most relevant results, thanks to its smart weighting algorithm.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
- Omnisearch is available on [the official Community Plugins repository](https://obsidian.md/plugins?search=Omnisearch).
|
||||||
|
- Beta releases can be installed through [BRAT](https://github.com/TfTHacker/obsidian42-brat). **Be advised that those versions can be buggy and break things.**
|
||||||
|
|
||||||
|
> [!INFO] Chinese users
|
||||||
|
> If you have notes in Chinese, you should install [this additional plugin](https://github.com/aidenlx/cm-chs-patch) for better search results.
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
- [[How to use Omnisearch]]
|
||||||
|
- [[Images, PDFs, and non-text documents]]
|
||||||
|
- [[Public API & URL Scheme]]
|
||||||
|
- [[Inject Omnisearch results into your search engine]]
|
||||||
|
- [[Issues & Solutions]]
|
||||||
|
|
||||||
|
## LICENSE
|
||||||
|
|
||||||
|
Omnisearch is licensed under [GPL-3](https://tldrlegal.com/license/gnu-general-public-license-v3-(gpl-3)). You're welcome to fork it and use its code in your own project, but you must disclose your code and publish it under the same terms.
|
||||||
|
|
||||||
|
## Thanks
|
||||||
|
|
||||||
|
❤ To all people who donate through [Ko-Fi](https://ko-fi.com/scambier) or [Github Sponsors](https://github.com/sponsors/scambier), to code contributors, and to the Obsidian team who graciously provides this Publish space ❤
|
||||||
|
|
||||||
|
If you wish to get involved in Omnisearch's development, there are [open issues](https://github.com/scambier/obsidian-omnisearch/issues) that need to be solved, and probably several of them tagged as "[good first issue](https://github.com/scambier/obsidian-omnisearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)" :)
|
||||||
|
|
||||||
|
|
||||||
|

|
|
@ -0,0 +1,26 @@
|
||||||
|
It is possible to get Omnisearch results into your favorite Internet search engine, to increase discoverability of your notes.
|
||||||
|
|
||||||
|
## How-to
|
||||||
|
|
||||||
|
1. Install the latest version of [Omnisearch](https://obsidian.md/plugins?search=Omnisearch)
|
||||||
|
2. Enable the HTTP server in Omnisearch settings ![[Pasted image 20231015195107.png]]
|
||||||
|
3. Install [Tampermonkey](https://www.tampermonkey.net/) (or another userscript manager) for your browser
|
||||||
|
4. Install the userscript corresponding to your favorite search engine:
|
||||||
|
- [Kagi](https://github.com/scambier/userscripts/raw/master/dist/obsidian-omnisearch-kagi.user.js)
|
||||||
|
- [Google](https://github.com/scambier/userscripts/raw/master/dist/obsidian-omnisearch-google.user.js)
|
||||||
|
- [DuckDuckGo](https://github.com/scambier/userscripts/raw/master/dist/obsidian-omnisearch-ddg.user.js)
|
||||||
|
- [Bing](https://github.com/scambier/userscripts/raw/master/dist/obsidian-omnisearch-bing.user.js)
|
||||||
|
|
||||||
|
> [!question] Userscripts
|
||||||
|
> [Userscripts](https://en.wikipedia.org/wiki/Userscript) are "micro plugins" for your browser, they're small hackable JavaScript programs intended to modify the appearance or behavior of some sites.
|
||||||
|
|
||||||
|
> [!info] HTTP Server
|
||||||
|
> More info on Omnisearch's HTTP server [[Public API & URL Scheme#HTTP Server API|here]].
|
||||||
|
|
||||||
|
## Demos
|
||||||
|
|
||||||
|
![[Pasted image 20231015190539.png]]
|
||||||
|
<small>Omnisearch results injected in Google</small>
|
||||||
|
|
||||||
|
![[Pasted image 20231016173131.png]]
|
||||||
|
<small>Omnisearch results injected in Kagi</small>
|
36
Doc Omnisearch/Issues & Solutions.md
Normal file
36
Doc Omnisearch/Issues & Solutions.md
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
Despite our best efforts, we unfortunately can't totally prevent bugs and performance issues. Those few tips should hopefully help you fix Omnisearch.
|
||||||
|
|
||||||
|
**Omnisearch makes Obsidian sluggish/freeze at startup.**
|
||||||
|
|
||||||
|
- While Omnisearch does its best to work smoothly in the background, bigger vaults and files can make Obsidian stutter during indexing.
|
||||||
|
- If you have several thousands of files, Obsidian may freeze a few seconds at startup while its cache is loaded in memory.
|
||||||
|
- To avoid boot loop crashes, Omnisearch will automatically disable its cache if there is an issue at startup.
|
||||||
|
|
||||||
|
**Omnisearch crashes on Android/iOS.**
|
||||||
|
|
||||||
|
- If you have many notes, Omnisearch can consume more RAM than what is available on your device. This can cause hard crashes, and there is no solution other than to disable Omnisearch.
|
||||||
|
- iOS devices are more prone to crashes when loading the cache. For this reason, caching is disabled on iOS.
|
||||||
|
|
||||||
|
**Omnisearch seems to make Obsidian slower.**
|
||||||
|
|
||||||
|
- Once Obsidian has indexed your files at startup, it doesn't do any background work while its modal is closed. Your changes are not indexed until you open the modal again. If you experience slowdowns while using Obsidian, it's unlikely that Omnisearch is responsible.
|
||||||
|
- However, Text Extractor can make Obsidian slower while indexing PDFs and images for the first time. If you don't need those features, you can disable them in the plugin settings.
|
||||||
|
|
||||||
|
**Omnisearch is slow to index my PDFs and images**
|
||||||
|
|
||||||
|
- The first time Text Extractor reads those files, it can take a long time to extract their text. The results are then cached for the text startup.
|
||||||
|
|
||||||
|
**Omnisearch gives inconsistent/invalid results, there are errors in the developer console**
|
||||||
|
|
||||||
|
- Restart Obsidian to force a reindex of Omnisearch.
|
||||||
|
- The cache could be corrupted; you can clear it at the bottom of the settings page, then restart Obsidian.
|
||||||
|
|
||||||
|
**A query should return a result that does not appear.**
|
||||||
|
|
||||||
|
- If applicable, make sure that "*Ignore diacritics*" is enabled.
|
||||||
|
- If you have modified them, reset weightings to their original values.
|
||||||
|
- Rewrite your query and avoid numbers and common words.
|
||||||
|
|
||||||
|
**I'm still having an issue**
|
||||||
|
|
||||||
|
You can write your issue [here](https://github.com/scambier/obsidian-omnisearch/issues) with as much details as possible.
|
69
Doc Omnisearch/Public API & URL Scheme.md
Normal file
69
Doc Omnisearch/Public API & URL Scheme.md
Normal file
|
@ -0,0 +1,69 @@
|
||||||
|
|
||||||
|
For technical users and plugins developers, Omnisearch exposes several utilities to integrate it into other plugins or 3rd party tools.
|
||||||
|
|
||||||
|
## URL Scheme
|
||||||
|
|
||||||
|
You can open Omnisearch and trigger a search with the following scheme: `obsidian://omnisearch?query=foo bar`. This will switch the focus to Obsidian, open Omnisearch, and execute the query "foo bar".
|
||||||
|
## Omnisearch API
|
||||||
|
|
||||||
|
Access it directly within Obsidian with the global `omnisearch` object.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
// API:
|
||||||
|
type OmnisearchApi = {
|
||||||
|
// Returns a promise that will contain the same results as the Vault modal
|
||||||
|
search: (query: string) => Promise<ResultNoteApi[]>,
|
||||||
|
// Refreshes the index
|
||||||
|
refreshIndex: () => Promise<void>
|
||||||
|
// Register a callback that will be called when the indexing is done
|
||||||
|
registerOnIndexed: (callback: () => void) => void,
|
||||||
|
// Unregister a callback that was previously registered
|
||||||
|
unregisterOnIndexed: (callback: () => void) => void,
|
||||||
|
}
|
||||||
|
|
||||||
|
type ResultNoteApi = {
|
||||||
|
score: number
|
||||||
|
vault: string
|
||||||
|
path: string
|
||||||
|
basename: string
|
||||||
|
foundWords: string[]
|
||||||
|
matches: SearchMatchApi[]
|
||||||
|
excerpt: string
|
||||||
|
}
|
||||||
|
|
||||||
|
type SearchMatchApi = {
|
||||||
|
match: string
|
||||||
|
offset: number
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example: Dataview Integration
|
||||||
|
|
||||||
|
You can use the Omnisearch API directly within the [Dataview](https://blacksmithgu.github.io/obsidian-dataview/) plugin.
|
||||||
|
|
||||||
|
~~~js
|
||||||
|
```dataviewjs
|
||||||
|
const results = await omnisearch.search('your query')
|
||||||
|
const arr = dv.array(results).sort(r => r.score, 'desc')
|
||||||
|
dv.table(['File', 'Score'], arr.map(o => [dv.fileLink(o.path), Math.round(o.score)]))
|
||||||
|
```
|
||||||
|
~~~
|
||||||
|
|
||||||
|
## HTTP Server API
|
||||||
|
|
||||||
|
For our most tech-savvy users, Omnisearch comes with a simple HTTP server. That makes it possible to query Omnisearch from 3rd-party applications running on your computer.
|
||||||
|
|
||||||
|
```
|
||||||
|
GET http://localhost:51361/search?q=your%20query
|
||||||
|
```
|
||||||
|
|
||||||
|
This will return a JSON array of `ResultNoteApi`, exactly like the "internal" API.
|
||||||
|
|
||||||
|
> [!Important]
|
||||||
|
> The HTTP Server must be activated in Omnisearch settings. It is not accessible outside of localhost. The server is automatically stopped when closing Obsidian.
|
||||||
|
>
|
||||||
|
> This feature is not available on mobile.
|
||||||
|
|
||||||
|
**Usage example: [[Inject Omnisearch results into your search engine]]**
|
||||||
|
|
||||||
|
|
BIN
Doc Omnisearch/media/Pasted image 20231015190539.png
Normal file
BIN
Doc Omnisearch/media/Pasted image 20231015190539.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 183 KiB |
BIN
Doc Omnisearch/media/Pasted image 20231015195107.png
Normal file
BIN
Doc Omnisearch/media/Pasted image 20231015195107.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 5.8 KiB |
BIN
Doc Omnisearch/media/Pasted image 20231016173131.png
Normal file
BIN
Doc Omnisearch/media/Pasted image 20231016173131.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 113 KiB |
BIN
Doc Omnisearch/media/omnisearch-web.webm
Normal file
BIN
Doc Omnisearch/media/omnisearch-web.webm
Normal file
Binary file not shown.
674
LICENSE
Normal file
674
LICENSE
Normal file
|
@ -0,0 +1,674 @@
|
||||||
|
GNU GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 29 June 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
the GNU General Public License is intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users. We, the Free Software Foundation, use the
|
||||||
|
GNU General Public License for most of our software; it applies also to
|
||||||
|
any other work released this way by its authors. You can apply it to
|
||||||
|
your programs, too.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
To protect your rights, we need to prevent others from denying you
|
||||||
|
these rights or asking you to surrender the rights. Therefore, you have
|
||||||
|
certain responsibilities if you distribute copies of the software, or if
|
||||||
|
you modify it: responsibilities to respect the freedom of others.
|
||||||
|
|
||||||
|
For example, if you distribute copies of such a program, whether
|
||||||
|
gratis or for a fee, you must pass on to the recipients the same
|
||||||
|
freedoms that you received. You must make sure that they, too, receive
|
||||||
|
or can get the source code. And you must show them these terms so they
|
||||||
|
know their rights.
|
||||||
|
|
||||||
|
Developers that use the GNU GPL protect your rights with two steps:
|
||||||
|
(1) assert copyright on the software, and (2) offer you this License
|
||||||
|
giving you legal permission to copy, distribute and/or modify it.
|
||||||
|
|
||||||
|
For the developers' and authors' protection, the GPL clearly explains
|
||||||
|
that there is no warranty for this free software. For both users' and
|
||||||
|
authors' sake, the GPL requires that modified versions be marked as
|
||||||
|
changed, so that their problems will not be attributed erroneously to
|
||||||
|
authors of previous versions.
|
||||||
|
|
||||||
|
Some devices are designed to deny users access to install or run
|
||||||
|
modified versions of the software inside them, although the manufacturer
|
||||||
|
can do so. This is fundamentally incompatible with the aim of
|
||||||
|
protecting users' freedom to change the software. The systematic
|
||||||
|
pattern of such abuse occurs in the area of products for individuals to
|
||||||
|
use, which is precisely where it is most unacceptable. Therefore, we
|
||||||
|
have designed this version of the GPL to prohibit the practice for those
|
||||||
|
products. If such problems arise substantially in other domains, we
|
||||||
|
stand ready to extend this provision to those domains in future versions
|
||||||
|
of the GPL, as needed to protect the freedom of users.
|
||||||
|
|
||||||
|
Finally, every program is threatened constantly by software patents.
|
||||||
|
States should not allow patents to restrict development and use of
|
||||||
|
software on general-purpose computers, but in those that do, we wish to
|
||||||
|
avoid the special danger that patents applied to a free program could
|
||||||
|
make it effectively proprietary. To prevent this, the GPL assures that
|
||||||
|
patents cannot be used to render the program non-free.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Use with the GNU Affero General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU Affero General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the special requirements of the GNU Affero General Public License,
|
||||||
|
section 13, concerning interaction through a network will apply to the
|
||||||
|
combination as such.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU General Public License from time to time. Such new versions will
|
||||||
|
be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||||
|
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
16. Limitation of Liability.
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||||
|
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||||
|
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||||
|
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||||
|
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||||
|
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||||
|
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||||
|
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||||
|
SUCH DAMAGES.
|
||||||
|
|
||||||
|
17. Interpretation of Sections 15 and 16.
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided
|
||||||
|
above cannot be given local legal effect according to their terms,
|
||||||
|
reviewing courts shall apply local law that most closely approximates
|
||||||
|
an absolute waiver of all civil liability in connection with the
|
||||||
|
Program, unless a warranty or assumption of liability accompanies a
|
||||||
|
copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
How to Apply These Terms to Your New Programs
|
||||||
|
|
||||||
|
If you develop a new program, and you want it to be of the greatest
|
||||||
|
possible use to the public, the best way to achieve this is to make it
|
||||||
|
free software which everyone can redistribute and change under these terms.
|
||||||
|
|
||||||
|
To do so, attach the following notices to the program. It is safest
|
||||||
|
to attach them to the start of each source file to most effectively
|
||||||
|
state the exclusion of warranty; and each file should have at least
|
||||||
|
the "copyright" line and a pointer to where the full notice is found.
|
||||||
|
|
||||||
|
<one line to give the program's name and a brief idea of what it does.>
|
||||||
|
Copyright (C) <year> <name of author>
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
Also add information on how to contact you by electronic and paper mail.
|
||||||
|
|
||||||
|
If the program does terminal interaction, make it output a short
|
||||||
|
notice like this when it starts in an interactive mode:
|
||||||
|
|
||||||
|
<program> Copyright (C) <year> <name of author>
|
||||||
|
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||||
|
This is free software, and you are welcome to redistribute it
|
||||||
|
under certain conditions; type `show c' for details.
|
||||||
|
|
||||||
|
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||||
|
parts of the General Public License. Of course, your program's commands
|
||||||
|
might be different; for a GUI interface, you would use an "about box".
|
||||||
|
|
||||||
|
You should also get your employer (if you work as a programmer) or school,
|
||||||
|
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||||
|
For more information on this, and how to apply and follow the GNU GPL, see
|
||||||
|
<https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
The GNU General Public License does not permit incorporating your program
|
||||||
|
into proprietary programs. If your program is a subroutine library, you
|
||||||
|
may consider it more useful to permit linking proprietary applications with
|
||||||
|
the library. If this is what you want to do, use the GNU Lesser General
|
||||||
|
Public License instead of this License. But first, please read
|
||||||
|
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
75
README.md
Normal file
75
README.md
Normal file
|
@ -0,0 +1,75 @@
|
||||||
|
# Omnisearch for Obsidian
|
||||||
|
|
||||||
|
[](https://github.com/sponsors/scambier)
|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|
|
||||||
|
> 🏆 Winner of the _[2023 Gems of the Year](https://obsidian.md/blog/2023-goty-winners/)_ in the "Existing plugin" category 🏆
|
||||||
|
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Omnisearch** is a search engine that "_just works_".
|
||||||
|
It always instantly shows you the most relevant results, thanks to its smart weighting algorithm.
|
||||||
|
|
||||||
|
Under the hood, it uses the excellent [MiniSearch](https://github.com/lucaong/minisearch) library. This free plugin is totally unrelated to the omnisearch.ai paid product.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
https://publish.obsidian.md/omnisearch/Index
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
- Omnisearch is available on [the official Community Plugins repository](https://obsidian.md/plugins?search=Omnisearch).
|
||||||
|
- Beta releases can be installed through [BRAT](https://github.com/TfTHacker/obsidian42-brat). **Be advised that those
|
||||||
|
versions can be buggy and break things.**
|
||||||
|
|
||||||
|
You can check the [CHANGELOG](./CHANGELOG.md) for more information on the different versions.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
> Omnisearch's first goal is to _locate_ files instantly. You can see it as a _Quick Switcher_ on steroids.
|
||||||
|
|
||||||
|
- Find your **📝notes, 📄Office documents, 📄PDFs, and 🖼images** faster than ever
|
||||||
|
- Images, documents, and PDF indexing is available
|
||||||
|
through [Text Extractor](https://github.com/scambier/obsidian-text-extractor)
|
||||||
|
- Automatic document scoring using
|
||||||
|
the [BM25 algorithm](https://github.com/lucaong/minisearch/issues/129#issuecomment-1046257399)
|
||||||
|
- The relevance of a document against a query depends on the number of times the query terms appear in the document,
|
||||||
|
its filename, and its headings
|
||||||
|
- Keyboard first: you never have to use your mouse
|
||||||
|
- Workflow similar to the "Quick Switcher" core plugin
|
||||||
|
- Opt-in local HTTP server to query Omnisearch from outside of Obsidian
|
||||||
|
- Resistance to typos
|
||||||
|
- Switch between Vault and In-file search to quickly skim multiple results in a single note
|
||||||
|
- Supports `"expressions in quotes"` and `-exclusions`
|
||||||
|
- Filters file types with `.jpg` or `.md`
|
||||||
|
- Directly Insert a `[[link]]` from the search results
|
||||||
|
- Supports Vim navigation keys
|
||||||
|
|
||||||
|
**Note:** support of Chinese depends
|
||||||
|
on [this additional plugin](https://github.com/aidenlx/cm-chs-patch) (also you may need to clear search cache data to apply new Chinese index). Please read its documentation for more
|
||||||
|
information.
|
||||||
|
|
||||||
|
## Projects that use Omnisearch
|
||||||
|
|
||||||
|
_Submit a PR to add your own project!_
|
||||||
|
|
||||||
|
- [Omnisearch Companion](https://github.com/ALegendsTale/omnisearch-companion), an extension for your browser ([Firefox](https://addons.mozilla.org/en-US/firefox/addon/omnisearch-companion/), [Chrome](https://chromewebstore.google.com/detail/omnisearch-companion/kcjcnnlpfbilodfnnkpioijobpjhokkd))
|
||||||
|
- [Actions for Obsidian](https://actions.work/actions-for-obsidian)
|
||||||
|
- [Userscripts](https://publish.obsidian.md/omnisearch/Inject+Omnisearch+results+into+your+search+engine) to inject Omnisearch into your favorite web search engine
|
||||||
|
|
||||||
|
## LICENSE
|
||||||
|
|
||||||
|
Omnisearch is licensed under [GPL-3](https://tldrlegal.com/license/gnu-general-public-license-v3-(gpl-3)).
|
||||||
|
|
||||||
|
## Thanks
|
||||||
|
|
||||||
|
To all people who donate through [Ko-Fi](https://ko-fi.com/scambier)
|
||||||
|
or [Github Sponsors](https://github.com/sponsors/scambier) ❤
|
||||||
|
|
||||||
|

|
135
assets/styles.css
Normal file
135
assets/styles.css
Normal file
|
@ -0,0 +1,135 @@
|
||||||
|
.omnisearch-modal {
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result {
|
||||||
|
white-space: normal;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: row;
|
||||||
|
/* justify-content: space-between; */
|
||||||
|
flex-wrap: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__title-container {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
column-gap: 5px;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__title {
|
||||||
|
white-space: pre-wrap;
|
||||||
|
align-items: center;
|
||||||
|
display: flex;
|
||||||
|
gap: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__title > span {
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__folder-path {
|
||||||
|
font-size: 0.75rem;
|
||||||
|
align-items: center;
|
||||||
|
display: flex;
|
||||||
|
gap: 5px;
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__extension {
|
||||||
|
font-size: 0.7rem;
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__counter {
|
||||||
|
font-size: 0.7rem;
|
||||||
|
color: var(--text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__body {
|
||||||
|
white-space: normal;
|
||||||
|
font-size: small;
|
||||||
|
word-wrap: normal;
|
||||||
|
|
||||||
|
overflow: hidden;
|
||||||
|
display: -webkit-box;
|
||||||
|
-webkit-line-clamp: 3;
|
||||||
|
-webkit-box-orient: vertical;
|
||||||
|
|
||||||
|
color: var(--text-muted);
|
||||||
|
margin-inline-start: 0.5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__embed {
|
||||||
|
margin-left: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.omnisearch-result__image-container {
|
||||||
|
flex-basis: 20%;
|
||||||
|
text-align: end;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-highlight {
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-default-highlight {
|
||||||
|
text-decoration: underline;
|
||||||
|
text-decoration-color: var(--text-highlight-bg);
|
||||||
|
text-decoration-thickness: 3px;
|
||||||
|
text-underline-offset: -1px;
|
||||||
|
text-decoration-skip-ink: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-input-container {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
flex-direction: row;
|
||||||
|
gap: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__icon {
|
||||||
|
display: inline-block;
|
||||||
|
vertical-align: middle;
|
||||||
|
width: 16px;
|
||||||
|
height: 16px;
|
||||||
|
margin-right: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__icon svg {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-result__icon--emoji {
|
||||||
|
font-size: 16px;
|
||||||
|
vertical-align: middle;
|
||||||
|
margin-right: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media only screen and (max-width: 600px) {
|
||||||
|
.omnisearch-input-container {
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-input-container__buttons {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: row;
|
||||||
|
width: 100%;
|
||||||
|
padding: 0 1em 0 1em;
|
||||||
|
gap: 1em;
|
||||||
|
}
|
||||||
|
.omnisearch-input-container__buttons > button {
|
||||||
|
flex-grow: 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media only screen and (min-width: 600px) {
|
||||||
|
.omnisearch-input-container__buttons {
|
||||||
|
margin-inline-end: 1em;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.omnisearch-input-field {
|
||||||
|
position: relative;
|
||||||
|
flex-grow: 1;
|
||||||
|
}
|
12
cliff.toml
Normal file
12
cliff.toml
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
[changelog]
|
||||||
|
header = "Changelog"
|
||||||
|
body = """
|
||||||
|
{% for group, commits in commits | group_by(attribute="group") %}
|
||||||
|
### {{ group | upper_first }}
|
||||||
|
{% for commit in commits %}
|
||||||
|
- {{ commit.message | upper_first }}
|
||||||
|
{% endfor %}
|
||||||
|
{% endfor %}
|
||||||
|
"""
|
||||||
|
trim = true
|
||||||
|
footer = "<!-- generated by git-cliff -->"
|
65
esbuild.config.mjs
Normal file
65
esbuild.config.mjs
Normal file
|
@ -0,0 +1,65 @@
|
||||||
|
import esbuild from 'esbuild'
|
||||||
|
import process from 'process'
|
||||||
|
import builtins from 'builtin-modules'
|
||||||
|
import esbuildSvelte from 'esbuild-svelte'
|
||||||
|
import { sveltePreprocess } from 'svelte-preprocess'
|
||||||
|
import path from 'path'
|
||||||
|
import { copy } from 'esbuild-plugin-copy'
|
||||||
|
|
||||||
|
const banner = `/*
|
||||||
|
THIS IS A GENERATED/BUNDLED FILE BY ESBUILD
|
||||||
|
if you want to view the source, please visit the github repository of this plugin
|
||||||
|
*/
|
||||||
|
`
|
||||||
|
|
||||||
|
const prod = process.argv[2] === 'production'
|
||||||
|
|
||||||
|
const context = await esbuild.context({
|
||||||
|
banner: {
|
||||||
|
js: banner,
|
||||||
|
},
|
||||||
|
entryPoints: ['./src/main.ts'],
|
||||||
|
bundle: true,
|
||||||
|
external: [
|
||||||
|
'obsidian',
|
||||||
|
'electron',
|
||||||
|
'@codemirror/autocomplete',
|
||||||
|
'@codemirror/collab',
|
||||||
|
'@codemirror/commands',
|
||||||
|
'@codemirror/language',
|
||||||
|
'@codemirror/lint',
|
||||||
|
'@codemirror/search',
|
||||||
|
'@codemirror/state',
|
||||||
|
'@codemirror/view',
|
||||||
|
'@lezer/common',
|
||||||
|
'@lezer/highlight',
|
||||||
|
'@lezer/lr',
|
||||||
|
...builtins,
|
||||||
|
],
|
||||||
|
outfile: path.join('./dist', 'main.js'),
|
||||||
|
plugins: [
|
||||||
|
esbuildSvelte({
|
||||||
|
compilerOptions: { css: 'injected' },
|
||||||
|
preprocess: sveltePreprocess(),
|
||||||
|
}),
|
||||||
|
copy({
|
||||||
|
assets:{
|
||||||
|
from: ['manifest.json','./assets/styles.css'],
|
||||||
|
to: ['./']
|
||||||
|
}
|
||||||
|
})
|
||||||
|
],
|
||||||
|
format: 'cjs',
|
||||||
|
target: 'chrome98',
|
||||||
|
logLevel: 'info',
|
||||||
|
sourcemap: prod ? false : 'inline',
|
||||||
|
treeShaking: true,
|
||||||
|
minify: prod,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (prod) {
|
||||||
|
await context.rebuild()
|
||||||
|
process.exit(0)
|
||||||
|
} else {
|
||||||
|
await context.watch()
|
||||||
|
}
|
BIN
images/omnisearch.gif
Normal file
BIN
images/omnisearch.gif
Normal file
Binary file not shown.
After Width: | Height: | Size: 475 KiB |
198
jest.config.js
Normal file
198
jest.config.js
Normal file
|
@ -0,0 +1,198 @@
|
||||||
|
/*
|
||||||
|
* For a detailed explanation regarding each configuration property, visit:
|
||||||
|
* https://jestjs.io/docs/configuration
|
||||||
|
*/
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
// All imported modules in your tests should be mocked automatically
|
||||||
|
// automock: false,
|
||||||
|
|
||||||
|
// Stop running tests after `n` failures
|
||||||
|
// bail: 0,
|
||||||
|
|
||||||
|
// The directory where Jest should store its cached dependency information
|
||||||
|
// cacheDirectory: "C:\\Users\\cambi\\AppData\\Local\\Temp\\jest",
|
||||||
|
|
||||||
|
// Automatically clear mock calls, instances and results before every test
|
||||||
|
clearMocks: true,
|
||||||
|
|
||||||
|
// Indicates whether the coverage information should be collected while executing the test
|
||||||
|
collectCoverage: true,
|
||||||
|
|
||||||
|
// An array of glob patterns indicating a set of files for which coverage information should be collected
|
||||||
|
// collectCoverageFrom: undefined,
|
||||||
|
|
||||||
|
// The directory where Jest should output its coverage files
|
||||||
|
coverageDirectory: 'coverage',
|
||||||
|
|
||||||
|
// An array of regexp pattern strings used to skip coverage collection
|
||||||
|
// coveragePathIgnorePatterns: [
|
||||||
|
// "\\\\node_modules\\\\"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// Indicates which provider should be used to instrument code for coverage
|
||||||
|
coverageProvider: 'v8',
|
||||||
|
|
||||||
|
// A list of reporter names that Jest uses when writing coverage reports
|
||||||
|
// coverageReporters: [
|
||||||
|
// "json",
|
||||||
|
// "text",
|
||||||
|
// "lcov",
|
||||||
|
// "clover"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An object that configures minimum threshold enforcement for coverage results
|
||||||
|
// coverageThreshold: undefined,
|
||||||
|
|
||||||
|
// A path to a custom dependency extractor
|
||||||
|
// dependencyExtractor: undefined,
|
||||||
|
|
||||||
|
// Make calling deprecated APIs throw helpful error messages
|
||||||
|
// errorOnDeprecated: false,
|
||||||
|
|
||||||
|
// Force coverage collection from ignored files using an array of glob patterns
|
||||||
|
// forceCoverageMatch: [],
|
||||||
|
|
||||||
|
// A path to a module which exports an async function that is triggered once before all test suites
|
||||||
|
// globalSetup: undefined,
|
||||||
|
|
||||||
|
// A path to a module which exports an async function that is triggered once after all test suites
|
||||||
|
// globalTeardown: undefined,
|
||||||
|
|
||||||
|
// A set of global variables that need to be available in all test environments
|
||||||
|
// globals: {},
|
||||||
|
|
||||||
|
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
|
||||||
|
// maxWorkers: "50%",
|
||||||
|
|
||||||
|
// An array of directory names to be searched recursively up from the requiring module's location
|
||||||
|
// moduleDirectories: [
|
||||||
|
// "node_modules"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An array of file extensions your modules use
|
||||||
|
moduleFileExtensions: [
|
||||||
|
'ts',
|
||||||
|
'svelte',
|
||||||
|
'js',
|
||||||
|
// "jsx",
|
||||||
|
// "tsx",
|
||||||
|
// "json",
|
||||||
|
// "node"
|
||||||
|
],
|
||||||
|
|
||||||
|
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
|
||||||
|
// moduleNameMapper: {},
|
||||||
|
|
||||||
|
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||||
|
// modulePathIgnorePatterns: [],
|
||||||
|
|
||||||
|
// Activates notifications for test results
|
||||||
|
// notify: false,
|
||||||
|
|
||||||
|
// An enum that specifies notification mode. Requires { notify: true }
|
||||||
|
// notifyMode: "failure-change",
|
||||||
|
|
||||||
|
// A preset that is used as a base for Jest's configuration
|
||||||
|
// preset: undefined,
|
||||||
|
|
||||||
|
// Run tests from one or more projects
|
||||||
|
// projects: undefined,
|
||||||
|
|
||||||
|
// Use this configuration option to add custom reporters to Jest
|
||||||
|
// reporters: undefined,
|
||||||
|
|
||||||
|
// Automatically reset mock state before every test
|
||||||
|
// resetMocks: false,
|
||||||
|
|
||||||
|
// Reset the module registry before running each individual test
|
||||||
|
// resetModules: false,
|
||||||
|
|
||||||
|
// A path to a custom resolver
|
||||||
|
// resolver: undefined,
|
||||||
|
|
||||||
|
// Automatically restore mock state and implementation before every test
|
||||||
|
// restoreMocks: false,
|
||||||
|
|
||||||
|
// The root directory that Jest should scan for tests and modules within
|
||||||
|
// rootDir: undefined,
|
||||||
|
|
||||||
|
// A list of paths to directories that Jest should use to search for files in
|
||||||
|
// roots: [
|
||||||
|
// "<rootDir>"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// Allows you to use a custom runner instead of Jest's default test runner
|
||||||
|
// runner: "jest-runner",
|
||||||
|
|
||||||
|
// The paths to modules that run some code to configure or set up the testing environment before each test
|
||||||
|
// setupFiles: [],
|
||||||
|
|
||||||
|
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
||||||
|
// setupFilesAfterEnv: [],
|
||||||
|
|
||||||
|
// The number of seconds after which a test is considered as slow and reported as such in the results.
|
||||||
|
// slowTestThreshold: 5,
|
||||||
|
|
||||||
|
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
||||||
|
// snapshotSerializers: [],
|
||||||
|
|
||||||
|
// The test environment that will be used for testing
|
||||||
|
testEnvironment: 'jsdom',
|
||||||
|
|
||||||
|
// Options that will be passed to the testEnvironment
|
||||||
|
// testEnvironmentOptions: {},
|
||||||
|
|
||||||
|
// Adds a location field to test results
|
||||||
|
// testLocationInResults: false,
|
||||||
|
|
||||||
|
// The glob patterns Jest uses to detect test files
|
||||||
|
// testMatch: [
|
||||||
|
// "**/__tests__/**/*.[jt]s?(x)",
|
||||||
|
// "**/?(*.)+(spec|test).[tj]s?(x)"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
|
||||||
|
// testPathIgnorePatterns: [
|
||||||
|
// "\\\\node_modules\\\\"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// The regexp pattern or array of patterns that Jest uses to detect test files
|
||||||
|
// testRegex: [],
|
||||||
|
|
||||||
|
// This option allows the use of a custom results processor
|
||||||
|
// testResultsProcessor: undefined,
|
||||||
|
|
||||||
|
// This option allows use of a custom test runner
|
||||||
|
// testRunner: "jest-circus/runner",
|
||||||
|
|
||||||
|
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
|
||||||
|
// testURL: "http://localhost",
|
||||||
|
|
||||||
|
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
|
||||||
|
// timers: "real",
|
||||||
|
|
||||||
|
// A map from regular expressions to paths to transformers
|
||||||
|
transform: {
|
||||||
|
'^.+\\.(js|ts)$': 'babel-jest',
|
||||||
|
'^.+\\.svelte$': 'svelte-jester',
|
||||||
|
},
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
||||||
|
// transformIgnorePatterns: [
|
||||||
|
// "\\\\node_modules\\\\",
|
||||||
|
// "\\.pnp\\.[^\\\\]+$"
|
||||||
|
// ],
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
||||||
|
// unmockedModulePathPatterns: undefined,
|
||||||
|
|
||||||
|
// Indicates whether each individual test should be reported during the run
|
||||||
|
// verbose: undefined,
|
||||||
|
|
||||||
|
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
||||||
|
// watchPathIgnorePatterns: [],
|
||||||
|
|
||||||
|
// Whether to use watchman for file crawling
|
||||||
|
// watchman: true,
|
||||||
|
}
|
14
manifest.json
Normal file
14
manifest.json
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
{
|
||||||
|
"id": "locator",
|
||||||
|
"name": "Locator",
|
||||||
|
"version": "0.1.0",
|
||||||
|
"minAppVersion": "1.7.2",
|
||||||
|
"description": "Locate your notes in a few keystrokes",
|
||||||
|
"author": "Simon Cambier",
|
||||||
|
"authorUrl": "https://github.com/scambier/obsidian-omnisearch",
|
||||||
|
"fundingUrl": {
|
||||||
|
"Github": "https://github.com/sponsors/scambier",
|
||||||
|
"Ko-fi": "https://ko-fi.com/scambier"
|
||||||
|
},
|
||||||
|
"isDesktopOnly": false
|
||||||
|
}
|
59
package.json
Normal file
59
package.json
Normal file
|
@ -0,0 +1,59 @@
|
||||||
|
{
|
||||||
|
"name": "scambier.obsidian-locator",
|
||||||
|
"version": "1.27.1",
|
||||||
|
"description": "Locate your notes in a few keystrokes",
|
||||||
|
"main": "dist/main.js",
|
||||||
|
"scripts": {
|
||||||
|
"dev": "node esbuild.config.mjs",
|
||||||
|
"build": "tsc -noEmit -skipLibCheck && node esbuild.config.mjs production",
|
||||||
|
"check": "svelte-check --tsconfig ./tsconfig.json && madge -c src/main.ts",
|
||||||
|
"version": "node version-bump.mjs",
|
||||||
|
"test": "jest"
|
||||||
|
},
|
||||||
|
"keywords": [],
|
||||||
|
"author": "Simon Cambier",
|
||||||
|
"license": "GPL-3",
|
||||||
|
"devDependencies": {
|
||||||
|
"@babel/preset-env": "^7.26.9",
|
||||||
|
"@babel/preset-typescript": "^7.26.0",
|
||||||
|
"@testing-library/jest-dom": "^5.17.0",
|
||||||
|
"@tsconfig/svelte": "^3.0.0",
|
||||||
|
"@types/jest": "^27.5.2",
|
||||||
|
"@types/lodash-es": "^4.17.12",
|
||||||
|
"@types/node": "^16.18.126",
|
||||||
|
"@types/pako": "^2.0.3",
|
||||||
|
"babel-jest": "^27.5.1",
|
||||||
|
"builtin-modules": "^3.3.0",
|
||||||
|
"esbuild": "0.17.19",
|
||||||
|
"esbuild-plugin-copy": "1.3.0",
|
||||||
|
"esbuild-svelte": "^0.9.2",
|
||||||
|
"jest": "^27.5.1",
|
||||||
|
"madge": "^8.0.0",
|
||||||
|
"obsidian": "1.7.2",
|
||||||
|
"prettier": "^2.8.8",
|
||||||
|
"prettier-plugin-svelte": "^2.10.1",
|
||||||
|
"svelte": "^5.23.2",
|
||||||
|
"svelte-check": "^4.1.5",
|
||||||
|
"svelte-jester": "^2.3.2",
|
||||||
|
"svelte-preprocess": "^6.0.3",
|
||||||
|
"tslib": "2.3.1",
|
||||||
|
"typescript": "^5.8.2",
|
||||||
|
"vite": "^3.2.11"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"cancelable-promise": "^4.3.1",
|
||||||
|
"dexie": "^4.0.11",
|
||||||
|
"lodash-es": "4.17.21",
|
||||||
|
"markdown-link-extractor": "^4.0.2",
|
||||||
|
"minisearch": "7.1.0",
|
||||||
|
"pure-md5": "^0.1.14",
|
||||||
|
"search-query-parser": "^1.6.0",
|
||||||
|
"svelte-multiselect": "github:janosh/svelte-multiselect"
|
||||||
|
},
|
||||||
|
"pnpm": {
|
||||||
|
"overrides": {
|
||||||
|
"moment@>=2.18.0 <2.29.4": ">=2.29.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"packageManager": "pnpm@9.1.0+sha512.67f5879916a9293e5cf059c23853d571beaf4f753c707f40cb22bed5fb1578c6aad3b6c4107ccb3ba0b35be003eb621a16471ac836c87beb53f9d54bb4612724"
|
||||||
|
}
|
6609
pnpm-lock.yaml
Normal file
6609
pnpm-lock.yaml
Normal file
File diff suppressed because it is too large
Load Diff
29
qodana.yaml
Normal file
29
qodana.yaml
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
#-------------------------------------------------------------------------------#
|
||||||
|
# Qodana analysis is configured by qodana.yaml file #
|
||||||
|
# https://www.jetbrains.com/help/qodana/qodana-yaml.html #
|
||||||
|
#-------------------------------------------------------------------------------#
|
||||||
|
version: "1.0"
|
||||||
|
|
||||||
|
#Specify inspection profile for code analysis
|
||||||
|
profile:
|
||||||
|
name: qodana.starter
|
||||||
|
|
||||||
|
#Enable inspections
|
||||||
|
#include:
|
||||||
|
# - name: <SomeEnabledInspectionId>
|
||||||
|
|
||||||
|
#Disable inspections
|
||||||
|
#exclude:
|
||||||
|
# - name: <SomeDisabledInspectionId>
|
||||||
|
# paths:
|
||||||
|
# - <path/where/not/run/inspection>
|
||||||
|
|
||||||
|
#Execute shell command before Qodana execution (Applied in CI/CD pipeline)
|
||||||
|
#bootstrap: sh ./prepare-qodana.sh
|
||||||
|
|
||||||
|
#Install IDE plugins before Qodana execution (Applied in CI/CD pipeline)
|
||||||
|
#plugins:
|
||||||
|
# - id: <plugin.id> #(plugin id can be found at https://plugins.jetbrains.com)
|
||||||
|
|
||||||
|
#Specify Qodana linter for analysis (Applied in CI/CD pipeline)
|
||||||
|
linter: jetbrains/qodana-js:latest
|
123
src/__tests__/event-bus-tests.ts
Normal file
123
src/__tests__/event-bus-tests.ts
Normal file
|
@ -0,0 +1,123 @@
|
||||||
|
import { EventBus } from '../tools/event-bus'
|
||||||
|
|
||||||
|
describe('EventBus', () => {
|
||||||
|
it('should refuse the registering of invalid ctx/event names', () => {
|
||||||
|
const eventBus = new EventBus()
|
||||||
|
expect(() => eventBus.on('@', 'event', () => {})).toThrowError(
|
||||||
|
'Invalid context/event name - Cannot contain @'
|
||||||
|
)
|
||||||
|
expect(() => eventBus.on('context', '@', () => {})).toThrowError(
|
||||||
|
'Invalid context/event name - Cannot contain @'
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should emit different events to the same context', () => {
|
||||||
|
// Arrange
|
||||||
|
const bus = new EventBus()
|
||||||
|
const cb = jest.fn()
|
||||||
|
bus.on('context', 'event1', cb)
|
||||||
|
bus.on('context', 'event2', cb)
|
||||||
|
|
||||||
|
// Act
|
||||||
|
bus.emit('event1', 'PARAM_1')
|
||||||
|
bus.emit('event2', 'PARAM_2')
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(cb).toHaveBeenCalledTimes(2)
|
||||||
|
expect(cb).toHaveBeenNthCalledWith(1, 'PARAM_1')
|
||||||
|
expect(cb).toHaveBeenNthCalledWith(2, 'PARAM_2')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should emit the same events to different contexts', () => {
|
||||||
|
// Arrange
|
||||||
|
const bus = new EventBus()
|
||||||
|
const cb1 = jest.fn()
|
||||||
|
const cb2 = jest.fn()
|
||||||
|
bus.on('context1', 'event', cb1)
|
||||||
|
bus.on('context2', 'event', cb2)
|
||||||
|
|
||||||
|
// Act
|
||||||
|
bus.emit('event', 'PARAM_1')
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(cb1).toHaveBeenCalledTimes(1)
|
||||||
|
expect(cb1).toHaveBeenNthCalledWith(1, 'PARAM_1')
|
||||||
|
expect(cb2).toHaveBeenCalledTimes(1)
|
||||||
|
expect(cb2).toHaveBeenNthCalledWith(1, 'PARAM_1')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should forward multiple arguments', () => {
|
||||||
|
// Arrange
|
||||||
|
const bus = new EventBus()
|
||||||
|
const cb = jest.fn()
|
||||||
|
bus.on('context', 'event', cb)
|
||||||
|
|
||||||
|
// Act
|
||||||
|
bus.emit('event', 'foo', 'bar')
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(cb).toHaveBeenCalledWith('foo', 'bar')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should not emit events for disabled contexts', () => {
|
||||||
|
// Arrange
|
||||||
|
const bus = new EventBus()
|
||||||
|
const cb = jest.fn()
|
||||||
|
bus.on('context', 'event', cb)
|
||||||
|
bus.disable('context')
|
||||||
|
|
||||||
|
// Act
|
||||||
|
bus.emit('event', 'foo', 'bar')
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(cb).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should emit events for enabled contexts', () => {
|
||||||
|
// Arrange
|
||||||
|
const bus = new EventBus()
|
||||||
|
const cb = jest.fn()
|
||||||
|
bus.on('context', 'event', cb)
|
||||||
|
bus.disable('context')
|
||||||
|
bus.enable('context')
|
||||||
|
|
||||||
|
// Act
|
||||||
|
bus.emit('event', 'foo', 'bar')
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(cb).toHaveBeenCalledWith('foo', 'bar')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should unregister contexts', () => {
|
||||||
|
// Arrange
|
||||||
|
const bus = new EventBus()
|
||||||
|
const cb = jest.fn()
|
||||||
|
bus.on('context1', 'event', cb)
|
||||||
|
bus.on('context2', 'event', cb)
|
||||||
|
bus.off('context1')
|
||||||
|
|
||||||
|
// Act
|
||||||
|
bus.emit('event', 'foo', 'bar')
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(cb).toHaveBeenCalledTimes(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should unregister single events', () => {
|
||||||
|
// Arrange
|
||||||
|
const bus = new EventBus()
|
||||||
|
const cb1 = jest.fn()
|
||||||
|
const cb2 = jest.fn()
|
||||||
|
bus.on('context', 'event1', cb1)
|
||||||
|
bus.on('context', 'event2', cb2)
|
||||||
|
bus.off('context', 'event2')
|
||||||
|
|
||||||
|
// Act
|
||||||
|
bus.emit('event1')
|
||||||
|
bus.emit('event2')
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(cb1).toHaveBeenCalled()
|
||||||
|
expect(cb2).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
})
|
52
src/__tests__/query-tests.ts
Normal file
52
src/__tests__/query-tests.ts
Normal file
|
@ -0,0 +1,52 @@
|
||||||
|
import { Query } from '../search/query'
|
||||||
|
|
||||||
|
describe('The Query class', () => {
|
||||||
|
const stringQuery =
|
||||||
|
"foo bar 'lorem ipsum' -baz dolor \"sit amet\" -'quoted exclusion'"
|
||||||
|
|
||||||
|
it('should correctly parse string queries', () => {
|
||||||
|
// Act
|
||||||
|
const query = new Query(stringQuery, {
|
||||||
|
ignoreDiacritics: true,
|
||||||
|
ignoreArabicDiacritics: true,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
const segments = query.query.text
|
||||||
|
expect(segments).toHaveLength(5)
|
||||||
|
expect(segments).toContain('foo')
|
||||||
|
expect(segments).toContain('bar')
|
||||||
|
expect(segments).toContain('lorem ipsum')
|
||||||
|
expect(segments).toContain('dolor')
|
||||||
|
expect(segments).toContain('sit amet')
|
||||||
|
|
||||||
|
const exclusions = query.query.exclude.text
|
||||||
|
expect(exclusions).toHaveLength(2)
|
||||||
|
expect(exclusions).toContain('baz')
|
||||||
|
expect(exclusions).toContain('quoted exclusion')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('should not exclude words when there is no space before', () => {
|
||||||
|
// Act
|
||||||
|
const query = new Query('foo bar-baz', {
|
||||||
|
ignoreDiacritics: true,
|
||||||
|
ignoreArabicDiacritics: true,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(query.query.exclude.text).toHaveLength(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('.getExactTerms()', () => {
|
||||||
|
it('should an array of strings containg "exact" values', () => {
|
||||||
|
// Act
|
||||||
|
const query = new Query(stringQuery, {
|
||||||
|
ignoreDiacritics: true,
|
||||||
|
ignoreArabicDiacritics: true,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(query.getExactTerms()).toEqual(['lorem ipsum', 'sit amet'])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
49
src/__tests__/utils-tests.ts
Normal file
49
src/__tests__/utils-tests.ts
Normal file
|
@ -0,0 +1,49 @@
|
||||||
|
import type { CachedMetadata } from 'obsidian'
|
||||||
|
import { getAliasesFromMetadata } from '../tools/utils'
|
||||||
|
|
||||||
|
describe('Utils', () => {
|
||||||
|
describe('getAliasesFromMetadata', () => {
|
||||||
|
it('should return an empty array if no metadata is provided', () => {
|
||||||
|
// Act
|
||||||
|
const actual = getAliasesFromMetadata(null)
|
||||||
|
// Assert
|
||||||
|
expect(actual).toEqual([])
|
||||||
|
})
|
||||||
|
it('should return an empty array if no aliases are provided', () => {
|
||||||
|
// Act
|
||||||
|
const actual = getAliasesFromMetadata({})
|
||||||
|
// Assert
|
||||||
|
expect(actual).toEqual([])
|
||||||
|
})
|
||||||
|
it('should return the aliases array as-is', () => {
|
||||||
|
// Arrange
|
||||||
|
const metadata = {
|
||||||
|
frontmatter: { aliases: ['foo', 'bar'] },
|
||||||
|
} as unknown as CachedMetadata
|
||||||
|
// Act
|
||||||
|
const actual = getAliasesFromMetadata(metadata)
|
||||||
|
// Assert
|
||||||
|
expect(actual).toEqual(['foo', 'bar'])
|
||||||
|
})
|
||||||
|
it('should convert the aliases string into an array', () => {
|
||||||
|
// Arrange
|
||||||
|
const metadata = {
|
||||||
|
frontmatter: { aliases: 'foo, bar' },
|
||||||
|
} as unknown as CachedMetadata
|
||||||
|
// Act
|
||||||
|
const actual = getAliasesFromMetadata(metadata)
|
||||||
|
// Assert
|
||||||
|
expect(actual).toEqual(['foo', 'bar'])
|
||||||
|
})
|
||||||
|
it('should return an empty array if the aliases field is an empty string', () => {
|
||||||
|
// Arrange
|
||||||
|
const metadata = {
|
||||||
|
frontmatter: { aliases: '' },
|
||||||
|
} as unknown as CachedMetadata
|
||||||
|
// Act
|
||||||
|
const actual = getAliasesFromMetadata(metadata)
|
||||||
|
// Assert
|
||||||
|
expect(actual).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
9
src/components/GlyphAddNote.svelte
Normal file
9
src/components/GlyphAddNote.svelte
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
<script lang="ts"></script>
|
||||||
|
|
||||||
|
<span class="suggestion-flair" aria-label="Not created yet, select to create"
|
||||||
|
><svg viewBox="0 0 100 100" class="add-note-glyph" width="16" height="16"
|
||||||
|
><path
|
||||||
|
fill="currentColor"
|
||||||
|
stroke="currentColor"
|
||||||
|
d="M23.3,6.7c-3.7,0-6.7,3-6.7,6.7v73.3c0,3.7,3,6.7,6.7,6.7h28.4c-3.2-4.8-5.1-10.5-5.1-16.7c0-16.6,13.4-30,30-30 c2.3,0,4.5,0.3,6.7,0.8V31.7c0-0.9-0.3-1.7-1-2.4L60.7,7.6c-0.6-0.6-1.5-1-2.4-1L23.3,6.7z M56.7,13L77,33.3H60 c-1.8,0-3.3-1.5-3.3-3.3L56.7,13z M76.7,53.3c-12.9,0-23.3,10.4-23.3,23.3S63.8,100,76.7,100S100,89.6,100,76.7 S89.6,53.3,76.7,53.3z M76.7,63.3c1.8,0,3.3,1.5,3.3,3.3v6.7h6.7c1.8,0,3.3,1.5,3.3,3.3c0,1.8-1.5,3.3-3.3,3.3H80v6.7 c0,1.8-1.5,3.3-3.3,3.3c-1.8,0-3.3-1.5-3.3-3.3V80h-6.7c-1.8,0-3.3-1.5-3.3-3.3s1.5-3.3,3.3-3.3h6.7v-6.7 C73.3,64.8,74.8,63.3,76.7,63.3L76.7,63.3z" /></svg
|
||||||
|
></span>
|
66
src/components/InputSearch.svelte
Normal file
66
src/components/InputSearch.svelte
Normal file
|
@ -0,0 +1,66 @@
|
||||||
|
<script lang="ts">
|
||||||
|
import { debounce, Platform } from 'obsidian'
|
||||||
|
import { toggleInputComposition } from '../globals'
|
||||||
|
import { createEventDispatcher, tick } from 'svelte'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import { wait } from '../tools/utils'
|
||||||
|
|
||||||
|
export let initialValue = ''
|
||||||
|
export let placeholder = ''
|
||||||
|
export let plugin: LocatorPlugin
|
||||||
|
let initialSet = false
|
||||||
|
let value = ''
|
||||||
|
let elInput: HTMLInputElement
|
||||||
|
const dispatch = createEventDispatcher()
|
||||||
|
|
||||||
|
export function setInputValue(v: string): void {
|
||||||
|
value = v
|
||||||
|
}
|
||||||
|
|
||||||
|
function watchInitialValue(v: string): void {
|
||||||
|
if (v && !initialSet && !value) {
|
||||||
|
initialSet = true
|
||||||
|
value = v
|
||||||
|
selectInput()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$: watchInitialValue(initialValue)
|
||||||
|
|
||||||
|
function selectInput(_?: HTMLElement): void {
|
||||||
|
tick()
|
||||||
|
.then(async () => {
|
||||||
|
if (Platform.isMobileApp) await wait(200)
|
||||||
|
elInput.focus()
|
||||||
|
return tick()
|
||||||
|
})
|
||||||
|
.then(async () => {
|
||||||
|
if (Platform.isMobileApp) await wait(200)
|
||||||
|
elInput.select()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const debouncedOnInput = debounce(() => {
|
||||||
|
// If typing a query and not executing it,
|
||||||
|
// the next time we open the modal, the search field will be empty
|
||||||
|
plugin.searchHistory.addToHistory('')
|
||||||
|
dispatch('input', value)
|
||||||
|
}, 300)
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<div class="omnisearch-input-container">
|
||||||
|
<div class="omnisearch-input-field">
|
||||||
|
<input
|
||||||
|
bind:this="{elInput}"
|
||||||
|
bind:value="{value}"
|
||||||
|
class="prompt-input"
|
||||||
|
on:compositionend="{_ => toggleInputComposition(false)}"
|
||||||
|
on:compositionstart="{_ => toggleInputComposition(true)}"
|
||||||
|
on:input="{debouncedOnInput}"
|
||||||
|
placeholder="{placeholder}"
|
||||||
|
spellcheck="false"
|
||||||
|
type="text"
|
||||||
|
use:selectInput />
|
||||||
|
</div>
|
||||||
|
<slot />
|
||||||
|
</div>
|
6
src/components/ModalContainer.svelte
Normal file
6
src/components/ModalContainer.svelte
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
<script lang="ts">
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<div class="prompt-results" on:mousedown={e => e.preventDefault()}>
|
||||||
|
<slot />
|
||||||
|
</div>
|
223
src/components/ModalInFile.svelte
Normal file
223
src/components/ModalInFile.svelte
Normal file
|
@ -0,0 +1,223 @@
|
||||||
|
<script lang="ts">
|
||||||
|
import InputSearch from './InputSearch.svelte'
|
||||||
|
import {
|
||||||
|
Action,
|
||||||
|
eventBus,
|
||||||
|
excerptAfter,
|
||||||
|
type ResultNote,
|
||||||
|
type SearchMatch,
|
||||||
|
} from '../globals'
|
||||||
|
import { getCtrlKeyLabel, loopIndex } from '../tools/utils'
|
||||||
|
import { onDestroy, onMount, tick } from 'svelte'
|
||||||
|
import { MarkdownView, Platform } from 'obsidian'
|
||||||
|
import ModalContainer from './ModalContainer.svelte'
|
||||||
|
import {
|
||||||
|
LocatorInFileModal,
|
||||||
|
LocatorVaultModal,
|
||||||
|
} from '../components/modals'
|
||||||
|
import ResultItemInFile from './ResultItemInFile.svelte'
|
||||||
|
import { Query } from '../search/query'
|
||||||
|
import { openNote } from '../tools/notes'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
|
||||||
|
export let plugin: LocatorPlugin
|
||||||
|
export let modal: LocatorInFileModal
|
||||||
|
export let parent: LocatorVaultModal | null = null
|
||||||
|
export let singleFilePath = ''
|
||||||
|
export let previousQuery: string | undefined
|
||||||
|
|
||||||
|
let searchQuery: string
|
||||||
|
let groupedOffsets: number[] = []
|
||||||
|
let selectedIndex = 0
|
||||||
|
let note: ResultNote | undefined
|
||||||
|
let query: Query
|
||||||
|
|
||||||
|
$: searchQuery = previousQuery ?? ''
|
||||||
|
|
||||||
|
onMount(() => {
|
||||||
|
eventBus.enable('infile')
|
||||||
|
|
||||||
|
eventBus.on('infile', Action.Enter, openSelection)
|
||||||
|
eventBus.on('infile', Action.OpenInNewPane, openSelectionInNewTab)
|
||||||
|
eventBus.on('infile', Action.ArrowUp, () => moveIndex(-1))
|
||||||
|
eventBus.on('infile', Action.ArrowDown, () => moveIndex(1))
|
||||||
|
eventBus.on('infile', Action.Tab, switchToVaultModal)
|
||||||
|
})
|
||||||
|
|
||||||
|
onDestroy(() => {
|
||||||
|
eventBus.disable('infile')
|
||||||
|
})
|
||||||
|
|
||||||
|
$: (async () => {
|
||||||
|
if (searchQuery) {
|
||||||
|
query = new Query(searchQuery, {
|
||||||
|
ignoreDiacritics: plugin.settings.ignoreDiacritics,
|
||||||
|
ignoreArabicDiacritics: plugin.settings.ignoreArabicDiacritics,
|
||||||
|
})
|
||||||
|
note =
|
||||||
|
(
|
||||||
|
await plugin.searchEngine.getSuggestions(query, {
|
||||||
|
singleFilePath,
|
||||||
|
})
|
||||||
|
)[0] ?? null
|
||||||
|
}
|
||||||
|
selectedIndex = 0
|
||||||
|
await scrollIntoView()
|
||||||
|
})()
|
||||||
|
|
||||||
|
$: {
|
||||||
|
if (note) {
|
||||||
|
let groups = getGroups(note.matches)
|
||||||
|
|
||||||
|
// If there are quotes in the search,
|
||||||
|
// only show results that match at least one of the quotes
|
||||||
|
const exactTerms = query.getExactTerms()
|
||||||
|
if (exactTerms.length) {
|
||||||
|
groups = groups.filter(group =>
|
||||||
|
exactTerms.every(exact =>
|
||||||
|
group.some(match => match.match.includes(exact))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
groupedOffsets = groups.map(group => Math.round(group.first()!.offset))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Group together close matches to reduce the number of results
|
||||||
|
*/
|
||||||
|
function getGroups(matches: SearchMatch[]): SearchMatch[][] {
|
||||||
|
const groups: SearchMatch[][] = []
|
||||||
|
let lastOffset = -1
|
||||||
|
let count = 0 // Avoid infinite loops
|
||||||
|
while (++count < 100) {
|
||||||
|
const group = getGroupedMatches(matches, lastOffset, excerptAfter)
|
||||||
|
if (!group.length) break
|
||||||
|
lastOffset = group.last()!.offset
|
||||||
|
groups.push(group)
|
||||||
|
}
|
||||||
|
return groups
|
||||||
|
}
|
||||||
|
|
||||||
|
function getGroupedMatches(
|
||||||
|
matches: SearchMatch[],
|
||||||
|
offsetFrom: number,
|
||||||
|
maxLen: number
|
||||||
|
): SearchMatch[] {
|
||||||
|
const first = matches.find(m => m.offset > offsetFrom)
|
||||||
|
if (!first) return []
|
||||||
|
return matches.filter(
|
||||||
|
m => m.offset > offsetFrom && m.offset <= first.offset + maxLen
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function moveIndex(dir: 1 | -1): void {
|
||||||
|
selectedIndex = loopIndex(selectedIndex + dir, groupedOffsets.length)
|
||||||
|
scrollIntoView()
|
||||||
|
}
|
||||||
|
|
||||||
|
async function scrollIntoView(): Promise<void> {
|
||||||
|
await tick()
|
||||||
|
const elem = document.querySelector(`[data-result-id="${selectedIndex}"]`)
|
||||||
|
elem?.scrollIntoView({ behavior: 'auto', block: 'nearest' })
|
||||||
|
}
|
||||||
|
|
||||||
|
async function openSelectionInNewTab(): Promise<void> {
|
||||||
|
return openSelection(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function openSelection(newTab = false): Promise<void> {
|
||||||
|
if (note) {
|
||||||
|
modal.close()
|
||||||
|
if (parent) parent.close()
|
||||||
|
|
||||||
|
// Open (or switch focus to) the note
|
||||||
|
const reg = plugin.textProcessor.stringsToRegex(note.foundWords)
|
||||||
|
reg.exec(note.content)
|
||||||
|
await openNote(plugin.app, note, reg.lastIndex, newTab)
|
||||||
|
|
||||||
|
// Move cursor to the match
|
||||||
|
const view = plugin.app.workspace.getActiveViewOfType(MarkdownView)
|
||||||
|
if (!view) {
|
||||||
|
// Not an editable document, so no cursor to place
|
||||||
|
return
|
||||||
|
// throw new Error('OmniSearch - No active MarkdownView')
|
||||||
|
}
|
||||||
|
|
||||||
|
const offset = groupedOffsets[selectedIndex] ?? 0
|
||||||
|
const pos = view.editor.offsetToPos(offset)
|
||||||
|
pos.ch = 0
|
||||||
|
view.editor.setCursor(pos)
|
||||||
|
view.editor.scrollIntoView({
|
||||||
|
from: { line: pos.line - 10, ch: 0 },
|
||||||
|
to: { line: pos.line + 10, ch: 0 },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function switchToVaultModal(): void {
|
||||||
|
new LocatorVaultModal(plugin, searchQuery ?? previousQuery).open()
|
||||||
|
modal.close()
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<InputSearch
|
||||||
|
plugin="{plugin}"
|
||||||
|
on:input="{e => (searchQuery = e.detail)}"
|
||||||
|
placeholder="Locator - File"
|
||||||
|
initialValue="{previousQuery}">
|
||||||
|
<div class="omnisearch-input-container__buttons">
|
||||||
|
{#if Platform.isMobile}
|
||||||
|
<button on:click="{switchToVaultModal}">Vault search</button>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
</InputSearch>
|
||||||
|
|
||||||
|
<ModalContainer>
|
||||||
|
{#if groupedOffsets.length && note}
|
||||||
|
{#each groupedOffsets as offset, i}
|
||||||
|
<ResultItemInFile
|
||||||
|
{plugin}
|
||||||
|
offset="{offset}"
|
||||||
|
note="{note}"
|
||||||
|
index="{i}"
|
||||||
|
selected="{i === selectedIndex}"
|
||||||
|
on:mousemove="{_e => (selectedIndex = i)}"
|
||||||
|
on:click="{evt => openSelection(evt.ctrlKey)}"
|
||||||
|
on:auxclick="{evt => {
|
||||||
|
if (evt.button == 1) openSelection(true)
|
||||||
|
}}" />
|
||||||
|
{/each}
|
||||||
|
{:else}
|
||||||
|
<div style="text-align: center;">
|
||||||
|
We found 0 results for your search here.
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</ModalContainer>
|
||||||
|
|
||||||
|
<div class="prompt-instructions">
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">↑↓</span><span>to navigate</span>
|
||||||
|
</div>
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">↵</span><span>to open</span>
|
||||||
|
</div>
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">tab</span>
|
||||||
|
<span>to switch to Vault Search</span>
|
||||||
|
</div>
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">esc</span>
|
||||||
|
{#if !!parent}
|
||||||
|
<span>to go back to Vault Search</span>
|
||||||
|
{:else}
|
||||||
|
<span>to close</span>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{getCtrlKeyLabel()} ↵</span>
|
||||||
|
<span>to open in a new pane</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
428
src/components/ModalVault.svelte
Normal file
428
src/components/ModalVault.svelte
Normal file
|
@ -0,0 +1,428 @@
|
||||||
|
<script lang="ts">
|
||||||
|
import { MarkdownView, Notice, Platform, TFile } from 'obsidian'
|
||||||
|
import { onDestroy, onMount, tick } from 'svelte'
|
||||||
|
import InputSearch from './InputSearch.svelte'
|
||||||
|
import ModalContainer from './ModalContainer.svelte'
|
||||||
|
import {
|
||||||
|
eventBus,
|
||||||
|
indexingStep,
|
||||||
|
IndexingStepType,
|
||||||
|
type ResultNote,
|
||||||
|
SPACE_OR_PUNCTUATION,
|
||||||
|
Action,
|
||||||
|
} from '../globals'
|
||||||
|
import { createNote, openNote } from '../tools/notes'
|
||||||
|
import {
|
||||||
|
getCtrlKeyLabel,
|
||||||
|
getAltKeyLabel,
|
||||||
|
getExtension,
|
||||||
|
isFilePDF,
|
||||||
|
loopIndex,
|
||||||
|
} from '../tools/utils'
|
||||||
|
import {
|
||||||
|
LocatorInFileModal,
|
||||||
|
type LocatorVaultModal,
|
||||||
|
} from '../components/modals'
|
||||||
|
import ResultItemVault from './ResultItemVault.svelte'
|
||||||
|
import { Query } from '../search/query'
|
||||||
|
import { cancelable, CancelablePromise } from 'cancelable-promise'
|
||||||
|
import { debounce } from 'lodash-es'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import LazyLoader from './lazy-loader/LazyLoader.svelte'
|
||||||
|
|
||||||
|
let {
|
||||||
|
modal,
|
||||||
|
previousQuery,
|
||||||
|
plugin,
|
||||||
|
}: {
|
||||||
|
modal: LocatorVaultModal
|
||||||
|
previousQuery?: string | undefined
|
||||||
|
plugin: LocatorPlugin
|
||||||
|
} = $props()
|
||||||
|
|
||||||
|
let selectedIndex = $state(0)
|
||||||
|
let historySearchIndex = 0
|
||||||
|
let searchQuery = $state(previousQuery ?? '')
|
||||||
|
let resultNotes: ResultNote[] = $state([])
|
||||||
|
let query: Query
|
||||||
|
let indexingStepDesc = $state('')
|
||||||
|
let searching = $state(true)
|
||||||
|
let refInput: InputSearch | undefined
|
||||||
|
let openInNewPaneKey: string = $state('')
|
||||||
|
let openInCurrentPaneKey: string = $state('')
|
||||||
|
let createInNewPaneKey: string = $state('')
|
||||||
|
let createInCurrentPaneKey: string = $state('')
|
||||||
|
let openInNewLeafKey: string = `${getCtrlKeyLabel()} ${getAltKeyLabel()} ↵`
|
||||||
|
|
||||||
|
const selectedNote = $derived(resultNotes[selectedIndex])
|
||||||
|
|
||||||
|
$effect(() => {
|
||||||
|
if (plugin.settings.openInNewPane) {
|
||||||
|
openInNewPaneKey = '↵'
|
||||||
|
openInCurrentPaneKey = getCtrlKeyLabel() + ' ↵'
|
||||||
|
createInNewPaneKey = 'Shift ↵'
|
||||||
|
createInCurrentPaneKey = getCtrlKeyLabel() + ' Shift ↵'
|
||||||
|
} else {
|
||||||
|
openInNewPaneKey = getCtrlKeyLabel() + ' ↵'
|
||||||
|
openInCurrentPaneKey = '↵'
|
||||||
|
createInNewPaneKey = getCtrlKeyLabel() + ' Shift ↵'
|
||||||
|
createInCurrentPaneKey = 'Shift ↵'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
$effect(() => {
|
||||||
|
if (searchQuery) {
|
||||||
|
updateResultsDebounced()
|
||||||
|
} else {
|
||||||
|
searching = false
|
||||||
|
resultNotes = []
|
||||||
|
}
|
||||||
|
})
|
||||||
|
$effect(() => {
|
||||||
|
switch ($indexingStep) {
|
||||||
|
case IndexingStepType.LoadingCache:
|
||||||
|
indexingStepDesc = 'Loading cache...'
|
||||||
|
break
|
||||||
|
case IndexingStepType.ReadingFiles:
|
||||||
|
indexingStepDesc = 'Reading files...'
|
||||||
|
break
|
||||||
|
case IndexingStepType.IndexingFiles:
|
||||||
|
indexingStepDesc = 'Indexing files...'
|
||||||
|
break
|
||||||
|
case IndexingStepType.WritingCache:
|
||||||
|
updateResultsDebounced()
|
||||||
|
indexingStepDesc = 'Updating cache...'
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
updateResultsDebounced()
|
||||||
|
indexingStepDesc = ''
|
||||||
|
break
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
onMount(async () => {
|
||||||
|
eventBus.enable('vault')
|
||||||
|
eventBus.on('vault', Action.Enter, openNoteAndCloseModal)
|
||||||
|
eventBus.on('vault', Action.OpenInBackground, openNoteInBackground)
|
||||||
|
eventBus.on('vault', Action.CreateNote, createNoteAndCloseModal)
|
||||||
|
eventBus.on('vault', Action.OpenInNewPane, openNoteInNewPane)
|
||||||
|
eventBus.on('vault', Action.InsertLink, insertLink)
|
||||||
|
eventBus.on('vault', Action.Tab, switchToInFileModal)
|
||||||
|
eventBus.on('vault', Action.ArrowUp, () => moveIndex(-1))
|
||||||
|
eventBus.on('vault', Action.ArrowDown, () => moveIndex(1))
|
||||||
|
eventBus.on('vault', Action.PrevSearchHistory, prevSearchHistory)
|
||||||
|
eventBus.on('vault', Action.NextSearchHistory, nextSearchHistory)
|
||||||
|
eventBus.on('vault', Action.OpenInNewLeaf, openNoteInNewLeaf)
|
||||||
|
await plugin.notesIndexer.refreshIndex()
|
||||||
|
await updateResultsDebounced()
|
||||||
|
})
|
||||||
|
|
||||||
|
onDestroy(() => {
|
||||||
|
eventBus.disable('vault')
|
||||||
|
})
|
||||||
|
|
||||||
|
async function prevSearchHistory() {
|
||||||
|
// Filter out the empty string, if it's there
|
||||||
|
const history = (await plugin.searchHistory.getHistory()).filter(s => s)
|
||||||
|
if (++historySearchIndex >= history.length) {
|
||||||
|
historySearchIndex = 0
|
||||||
|
}
|
||||||
|
searchQuery = history[historySearchIndex]
|
||||||
|
refInput?.setInputValue(searchQuery ?? '')
|
||||||
|
}
|
||||||
|
|
||||||
|
async function nextSearchHistory() {
|
||||||
|
const history = (await plugin.searchHistory.getHistory()).filter(s => s)
|
||||||
|
if (--historySearchIndex < 0) {
|
||||||
|
historySearchIndex = history.length ? history.length - 1 : 0
|
||||||
|
}
|
||||||
|
searchQuery = history[historySearchIndex]
|
||||||
|
refInput?.setInputValue(searchQuery ?? '')
|
||||||
|
}
|
||||||
|
|
||||||
|
let cancelableQuery: CancelablePromise<ResultNote[]> | null = null
|
||||||
|
async function updateResults() {
|
||||||
|
searching = true
|
||||||
|
// If search is already in progress, cancel it and start a new one
|
||||||
|
if (cancelableQuery) {
|
||||||
|
cancelableQuery.cancel()
|
||||||
|
cancelableQuery = null
|
||||||
|
}
|
||||||
|
query = new Query(searchQuery, {
|
||||||
|
ignoreDiacritics: plugin.settings.ignoreDiacritics,
|
||||||
|
ignoreArabicDiacritics: plugin.settings.ignoreArabicDiacritics,
|
||||||
|
})
|
||||||
|
cancelableQuery = cancelable(
|
||||||
|
new Promise(resolve => {
|
||||||
|
resolve(plugin.searchEngine.getSuggestions(query))
|
||||||
|
})
|
||||||
|
)
|
||||||
|
resultNotes = await cancelableQuery
|
||||||
|
selectedIndex = 0
|
||||||
|
await scrollIntoView()
|
||||||
|
searching = false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Debounce this function to avoid multiple calls caused by Svelte reactivity
|
||||||
|
const updateResultsDebounced = debounce(updateResults, 0)
|
||||||
|
|
||||||
|
function onClick(evt?: MouseEvent | KeyboardEvent) {
|
||||||
|
if (!selectedNote) return
|
||||||
|
if (evt?.ctrlKey) {
|
||||||
|
openNoteInNewPane()
|
||||||
|
} else {
|
||||||
|
openNoteAndCloseModal()
|
||||||
|
}
|
||||||
|
modal.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
function openNoteAndCloseModal(): void {
|
||||||
|
if (!selectedNote) return
|
||||||
|
openSearchResult(selectedNote)
|
||||||
|
modal.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
function openNoteInBackground(): void {
|
||||||
|
if (!selectedNote) return
|
||||||
|
openSearchResult(selectedNote, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
function openNoteInNewPane(): void {
|
||||||
|
if (!selectedNote) return
|
||||||
|
openSearchResult(selectedNote, true)
|
||||||
|
modal.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
function openNoteInNewLeaf(): void {
|
||||||
|
if (!selectedNote) return
|
||||||
|
openSearchResult(selectedNote, true, true)
|
||||||
|
modal.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
function saveCurrentQuery() {
|
||||||
|
if (searchQuery) {
|
||||||
|
plugin.searchHistory.addToHistory(searchQuery)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function openSearchResult(
|
||||||
|
note: ResultNote,
|
||||||
|
newPane = false,
|
||||||
|
newLeaf = false
|
||||||
|
) {
|
||||||
|
saveCurrentQuery()
|
||||||
|
const offset = note.matches?.[0]?.offset ?? 0
|
||||||
|
openNote(plugin.app, note, offset, newPane, newLeaf)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function onClickCreateNote(_e: MouseEvent) {
|
||||||
|
await createNoteAndCloseModal()
|
||||||
|
}
|
||||||
|
|
||||||
|
async function createNoteAndCloseModal(opt?: {
|
||||||
|
newLeaf: boolean
|
||||||
|
}): Promise<void> {
|
||||||
|
if (searchQuery) {
|
||||||
|
try {
|
||||||
|
await createNote(plugin.app, searchQuery, opt?.newLeaf)
|
||||||
|
} catch (e) {
|
||||||
|
new Notice((e as Error).message)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
modal.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function insertLink(): void {
|
||||||
|
if (!selectedNote) return
|
||||||
|
const file = plugin.app.vault
|
||||||
|
.getMarkdownFiles()
|
||||||
|
.find(f => f.path === selectedNote.path)
|
||||||
|
const active = plugin.app.workspace.getActiveFile()
|
||||||
|
const view = plugin.app.workspace.getActiveViewOfType(MarkdownView)
|
||||||
|
if (!view?.editor) {
|
||||||
|
new Notice('Locator - Error - No active editor', 3000)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate link
|
||||||
|
let link: string
|
||||||
|
if (file && active) {
|
||||||
|
link = plugin.app.fileManager.generateMarkdownLink(
|
||||||
|
file,
|
||||||
|
active.path,
|
||||||
|
'',
|
||||||
|
selectedNote.displayTitle
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
const maybeDisplayTitle =
|
||||||
|
selectedNote.displayTitle === '' ? '' : `|${selectedNote.displayTitle}`
|
||||||
|
link = `[[${selectedNote.basename}.${getExtension(
|
||||||
|
selectedNote.path
|
||||||
|
)}${maybeDisplayTitle}]]`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Inject link
|
||||||
|
const cursor = view.editor.getCursor()
|
||||||
|
view.editor.replaceRange(link, cursor, cursor)
|
||||||
|
cursor.ch += link.length
|
||||||
|
view.editor.setCursor(cursor)
|
||||||
|
|
||||||
|
modal.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
function switchToInFileModal(): void {
|
||||||
|
// Do nothing if the selectedNote is a PDF,
|
||||||
|
// or if there is 0 match (e.g indexing in progress)
|
||||||
|
if (
|
||||||
|
selectedNote &&
|
||||||
|
(isFilePDF(selectedNote?.path) || !selectedNote?.matches.length)
|
||||||
|
) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
saveCurrentQuery()
|
||||||
|
modal.close()
|
||||||
|
|
||||||
|
if (selectedNote) {
|
||||||
|
// Open in-file modal for selected search result
|
||||||
|
const file = plugin.app.vault.getAbstractFileByPath(selectedNote.path)
|
||||||
|
if (file && file instanceof TFile) {
|
||||||
|
new LocatorInFileModal(plugin, file, searchQuery).open()
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Open in-file modal for active file
|
||||||
|
const view = plugin.app.workspace.getActiveViewOfType(MarkdownView)
|
||||||
|
if (view?.file) {
|
||||||
|
new LocatorInFileModal(plugin, view.file, searchQuery).open()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function moveIndex(dir: 1 | -1): void {
|
||||||
|
selectedIndex = loopIndex(selectedIndex + dir, resultNotes.length)
|
||||||
|
scrollIntoView()
|
||||||
|
}
|
||||||
|
|
||||||
|
async function scrollIntoView(): Promise<void> {
|
||||||
|
await tick()
|
||||||
|
if (selectedNote) {
|
||||||
|
const elem = activeWindow.document.querySelector(
|
||||||
|
`[data-result-id="${selectedNote.path}"]`
|
||||||
|
)
|
||||||
|
elem?.scrollIntoView({ behavior: 'auto', block: 'nearest' })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<InputSearch
|
||||||
|
bind:this={refInput}
|
||||||
|
{plugin}
|
||||||
|
initialValue={searchQuery}
|
||||||
|
on:input={e => (searchQuery = e.detail)}
|
||||||
|
placeholder="Locator - Vault">
|
||||||
|
<div class="omnisearch-input-container__buttons">
|
||||||
|
{#if plugin.settings.showCreateButton}
|
||||||
|
<button on:click={onClickCreateNote}>Create note</button>
|
||||||
|
{/if}
|
||||||
|
{#if Platform.isMobile}
|
||||||
|
<button on:click={switchToInFileModal}>In-File search</button>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
</InputSearch>
|
||||||
|
|
||||||
|
{#if indexingStepDesc}
|
||||||
|
<div style="text-align: center; color: var(--text-accent); margin-top: 10px">
|
||||||
|
⏳ Work in progress: {indexingStepDesc}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
|
||||||
|
<ModalContainer>
|
||||||
|
{#each resultNotes as result, i}
|
||||||
|
<LazyLoader
|
||||||
|
height={100}
|
||||||
|
offset={500}
|
||||||
|
keep={true}
|
||||||
|
fadeOption={{ delay: 0, duration: 0 }}>
|
||||||
|
<ResultItemVault
|
||||||
|
{plugin}
|
||||||
|
selected={i === selectedIndex}
|
||||||
|
note={result}
|
||||||
|
on:mousemove={_ => (selectedIndex = i)}
|
||||||
|
on:click={onClick}
|
||||||
|
on:auxclick={evt => {
|
||||||
|
if (evt.button == 1) openNoteInNewPane()
|
||||||
|
}} />
|
||||||
|
</LazyLoader>
|
||||||
|
{/each}
|
||||||
|
<div style="text-align: center;">
|
||||||
|
{#if !resultNotes.length && searchQuery && !searching}
|
||||||
|
We found 0 results for your search here.
|
||||||
|
{#if plugin.settings.simpleSearch && searchQuery
|
||||||
|
.split(SPACE_OR_PUNCTUATION)
|
||||||
|
.some(w => w.length < 3)}
|
||||||
|
<br />
|
||||||
|
<span style="color: var(--text-accent); font-size: small">
|
||||||
|
You have enabled "Simpler Search" in the settings, try to type more
|
||||||
|
characters.
|
||||||
|
</span>
|
||||||
|
{/if}
|
||||||
|
{:else if searching}
|
||||||
|
Searching...
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
</ModalContainer>
|
||||||
|
|
||||||
|
<div class="prompt-instructions">
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">↑↓</span><span>to navigate</span>
|
||||||
|
</div>
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{getAltKeyLabel()} ↑↓</span>
|
||||||
|
<span>to cycle history</span>
|
||||||
|
</div>
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{openInCurrentPaneKey}</span>
|
||||||
|
<span>to open</span>
|
||||||
|
</div>
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">Tab</span>
|
||||||
|
<span>to switch to In-File Search</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{openInNewPaneKey}</span>
|
||||||
|
<span>to open in a new pane</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{openInNewLeafKey}</span>
|
||||||
|
<span>to open in a new split</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{getCtrlKeyLabel()} o</span>
|
||||||
|
<span>to open in the background</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{createInCurrentPaneKey}</span>
|
||||||
|
<span>to create</span>
|
||||||
|
</div>
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{createInNewPaneKey}</span>
|
||||||
|
<span>to create in a new pane</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{getAltKeyLabel()} ↵</span>
|
||||||
|
<span>to insert a link</span>
|
||||||
|
</div>
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">{getCtrlKeyLabel()} g</span>
|
||||||
|
<span>to toggle excerpts</span>
|
||||||
|
</div>
|
||||||
|
<div class="prompt-instruction">
|
||||||
|
<span class="prompt-instruction-command">Esc</span><span>to close</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
22
src/components/ResultItemContainer.svelte
Normal file
22
src/components/ResultItemContainer.svelte
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
<script lang="ts">
|
||||||
|
import GlyphAddNote from './GlyphAddNote.svelte'
|
||||||
|
|
||||||
|
export let id: string
|
||||||
|
export let selected = false
|
||||||
|
export let glyph = false
|
||||||
|
export let cssClass = ''
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<div
|
||||||
|
data-result-id={id}
|
||||||
|
class="suggestion-item omnisearch-result {cssClass}"
|
||||||
|
class:is-selected={selected}
|
||||||
|
on:mousemove
|
||||||
|
on:click
|
||||||
|
on:keypress
|
||||||
|
on:auxclick>
|
||||||
|
{#if glyph}
|
||||||
|
<GlyphAddNote />
|
||||||
|
{/if}
|
||||||
|
<slot />
|
||||||
|
</div>
|
24
src/components/ResultItemInFile.svelte
Normal file
24
src/components/ResultItemInFile.svelte
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
<script lang="ts">
|
||||||
|
import type { ResultNote } from '../globals'
|
||||||
|
import ResultItemContainer from './ResultItemContainer.svelte'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
|
||||||
|
export let plugin: LocatorPlugin
|
||||||
|
export let offset: number
|
||||||
|
export let note: ResultNote
|
||||||
|
export let index = 0
|
||||||
|
export let selected = false
|
||||||
|
|
||||||
|
$: cleanedContent = plugin.textProcessor.makeExcerpt(note?.content ?? '', offset)
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<ResultItemContainer
|
||||||
|
id="{index.toString()}"
|
||||||
|
on:auxclick
|
||||||
|
on:click
|
||||||
|
on:mousemove
|
||||||
|
selected="{selected}">
|
||||||
|
<div class="omnisearch-result__body">
|
||||||
|
{@html plugin.textProcessor.highlightText(cleanedContent, note.matches)}
|
||||||
|
</div>
|
||||||
|
</ResultItemContainer>
|
227
src/components/ResultItemVault.svelte
Normal file
227
src/components/ResultItemVault.svelte
Normal file
|
@ -0,0 +1,227 @@
|
||||||
|
<script lang="ts">
|
||||||
|
import { showExcerpt } from '../settings/index'
|
||||||
|
import type { ResultNote } from '../globals'
|
||||||
|
import {
|
||||||
|
getExtension,
|
||||||
|
isFileCanvas,
|
||||||
|
isFileExcalidraw,
|
||||||
|
isFileImage,
|
||||||
|
isFilePDF,
|
||||||
|
pathWithoutFilename,
|
||||||
|
} from '../tools/utils'
|
||||||
|
import ResultItemContainer from './ResultItemContainer.svelte'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import { setIcon, TFile } from 'obsidian'
|
||||||
|
import { onMount } from 'svelte'
|
||||||
|
|
||||||
|
// Import icon utility functions
|
||||||
|
import {
|
||||||
|
loadIconData,
|
||||||
|
initializeIconPacks,
|
||||||
|
getIconNameForPath,
|
||||||
|
loadIconSVG,
|
||||||
|
getDefaultIconSVG,
|
||||||
|
} from '../tools/icon-utils'
|
||||||
|
|
||||||
|
export let selected = false
|
||||||
|
export let note: ResultNote
|
||||||
|
export let plugin: LocatorPlugin
|
||||||
|
|
||||||
|
let imagePath: string | null = null
|
||||||
|
let title = ''
|
||||||
|
let notePath = ''
|
||||||
|
let iconData = {}
|
||||||
|
let folderIconSVG: string | null = null
|
||||||
|
let fileIconSVG: string | null = null
|
||||||
|
let prefixToIconPack: { [prefix: string]: string } = {}
|
||||||
|
let iconsPath: string
|
||||||
|
let iconDataLoaded = false // Flag to indicate iconData is loaded
|
||||||
|
|
||||||
|
// Initialize icon data and icon packs once when the component mounts
|
||||||
|
onMount(async () => {
|
||||||
|
iconData = await loadIconData(plugin)
|
||||||
|
const iconPacks = await initializeIconPacks(plugin)
|
||||||
|
prefixToIconPack = iconPacks.prefixToIconPack
|
||||||
|
iconsPath = iconPacks.iconsPath
|
||||||
|
iconDataLoaded = true // Set the flag after iconData is loaded
|
||||||
|
})
|
||||||
|
|
||||||
|
// Reactive statement to call loadIcons() whenever the note changes and iconData is loaded
|
||||||
|
$: if (note && note.path && iconDataLoaded) {
|
||||||
|
;(async () => {
|
||||||
|
// Update title and notePath before loading icons
|
||||||
|
title = note.displayTitle || note.basename
|
||||||
|
notePath = pathWithoutFilename(note.path)
|
||||||
|
await loadIcons()
|
||||||
|
})()
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadIcons() {
|
||||||
|
// Load folder icon
|
||||||
|
const folderIconName = getIconNameForPath(notePath, iconData)
|
||||||
|
if (folderIconName) {
|
||||||
|
folderIconSVG = await loadIconSVG(
|
||||||
|
folderIconName,
|
||||||
|
plugin,
|
||||||
|
iconsPath,
|
||||||
|
prefixToIconPack
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
// Fallback to default folder icon
|
||||||
|
folderIconSVG = getDefaultIconSVG('folder', plugin)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load file icon
|
||||||
|
const fileIconName = getIconNameForPath(note.path, iconData)
|
||||||
|
if (fileIconName) {
|
||||||
|
fileIconSVG = await loadIconSVG(
|
||||||
|
fileIconName,
|
||||||
|
plugin,
|
||||||
|
iconsPath,
|
||||||
|
prefixToIconPack
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
// Fallback to default icons based on file type
|
||||||
|
fileIconSVG = getDefaultIconSVG(note.path, plugin)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Svelte action to render SVG content with dynamic updates
|
||||||
|
function renderSVG(node: HTMLElement, svgContent: string) {
|
||||||
|
node.innerHTML = svgContent
|
||||||
|
return {
|
||||||
|
update(newSvgContent: string) {
|
||||||
|
node.innerHTML = newSvgContent
|
||||||
|
},
|
||||||
|
destroy() {
|
||||||
|
node.innerHTML = ''
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let elFolderPathIcon: HTMLElement | null = null
|
||||||
|
let elFilePathIcon: HTMLElement | null = null
|
||||||
|
let elEmbedIcon: HTMLElement | null = null
|
||||||
|
|
||||||
|
$: {
|
||||||
|
imagePath = null
|
||||||
|
if (isFileImage(note.path)) {
|
||||||
|
const file = plugin.app.vault.getAbstractFileByPath(note.path)
|
||||||
|
if (file instanceof TFile) {
|
||||||
|
imagePath = plugin.app.vault.getResourcePath(file)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$: matchesTitle = plugin.textProcessor.getMatches(title, note.foundWords)
|
||||||
|
$: matchesNotePath = plugin.textProcessor.getMatches(
|
||||||
|
notePath,
|
||||||
|
note.foundWords
|
||||||
|
)
|
||||||
|
$: cleanedContent = plugin.textProcessor.makeExcerpt(
|
||||||
|
note.content,
|
||||||
|
note.matches[0]?.offset ?? -1
|
||||||
|
)
|
||||||
|
$: glyph = false //cacheManager.getLiveDocument(note.path)?.doesNotExist
|
||||||
|
$: {
|
||||||
|
title = note.displayTitle || note.basename
|
||||||
|
notePath = pathWithoutFilename(note.path)
|
||||||
|
|
||||||
|
// Icons
|
||||||
|
if (elFolderPathIcon) {
|
||||||
|
setIcon(elFolderPathIcon, 'folder-open')
|
||||||
|
}
|
||||||
|
if (elFilePathIcon) {
|
||||||
|
if (isFileImage(note.path)) {
|
||||||
|
setIcon(elFilePathIcon, 'image')
|
||||||
|
} else if (isFilePDF(note.path)) {
|
||||||
|
setIcon(elFilePathIcon, 'file-text')
|
||||||
|
} else if (isFileCanvas(note.path) || isFileExcalidraw(note.path)) {
|
||||||
|
setIcon(elFilePathIcon, 'layout-dashboard')
|
||||||
|
} else {
|
||||||
|
setIcon(elFilePathIcon, 'file')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (elEmbedIcon) {
|
||||||
|
setIcon(elEmbedIcon, 'corner-down-right')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<ResultItemContainer
|
||||||
|
glyph="{glyph}"
|
||||||
|
id="{note.path}"
|
||||||
|
cssClass=" {note.isEmbed ? 'omnisearch-result__embed' : ''}"
|
||||||
|
on:auxclick
|
||||||
|
on:click
|
||||||
|
on:mousemove
|
||||||
|
selected="{selected}">
|
||||||
|
<div>
|
||||||
|
<div class="omnisearch-result__title-container">
|
||||||
|
<span class="omnisearch-result__title">
|
||||||
|
{#if note.isEmbed}
|
||||||
|
<span
|
||||||
|
bind:this="{elEmbedIcon}"
|
||||||
|
title="The document above is embedded in this note"></span>
|
||||||
|
{:else}
|
||||||
|
<!-- File Icon -->
|
||||||
|
{#if fileIconSVG}
|
||||||
|
<span class="omnisearch-result__icon" use:renderSVG="{fileIconSVG}"
|
||||||
|
></span>
|
||||||
|
{/if}
|
||||||
|
{/if}
|
||||||
|
<span>
|
||||||
|
{@html plugin.textProcessor.highlightText(title, matchesTitle)}
|
||||||
|
</span>
|
||||||
|
{#if !note.displayTitle}
|
||||||
|
<span class="omnisearch-result__extension">
|
||||||
|
.{getExtension(note.path)}
|
||||||
|
</span>
|
||||||
|
{/if}
|
||||||
|
|
||||||
|
<!-- Counter -->
|
||||||
|
{#if note.matches.length > 0}
|
||||||
|
<span class="omnisearch-result__counter">
|
||||||
|
{note.matches.length} {note.matches.length > 1
|
||||||
|
? 'matches'
|
||||||
|
: 'match'}
|
||||||
|
</span>
|
||||||
|
{/if}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Folder path -->
|
||||||
|
{#if notePath}
|
||||||
|
<div class="omnisearch-result__folder-path">
|
||||||
|
<!-- Folder Icon -->
|
||||||
|
{#if folderIconSVG}
|
||||||
|
<span class="omnisearch-result__icon" use:renderSVG="{folderIconSVG}"
|
||||||
|
></span>
|
||||||
|
{/if}
|
||||||
|
<span>
|
||||||
|
{@html plugin.textProcessor.highlightText(notePath, matchesNotePath)}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
|
||||||
|
<!-- Do not display the excerpt for embedding references -->
|
||||||
|
{#if !note.isEmbed}
|
||||||
|
<div style="display: flex; flex-direction: row;">
|
||||||
|
{#if $showExcerpt}
|
||||||
|
<div class="omnisearch-result__body">
|
||||||
|
{@html plugin.textProcessor.highlightText(
|
||||||
|
cleanedContent,
|
||||||
|
note.matches
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
|
||||||
|
<!-- Image -->
|
||||||
|
{#if imagePath}
|
||||||
|
<div class="omnisearch-result__image-container">
|
||||||
|
<img style="width: 100px" src="{imagePath}" alt="" />
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
</ResultItemContainer>
|
208
src/components/lazy-loader/LazyLoader.svelte
Normal file
208
src/components/lazy-loader/LazyLoader.svelte
Normal file
|
@ -0,0 +1,208 @@
|
||||||
|
<div use:load class={rootClass} style="height: {rootInitialHeight}">
|
||||||
|
{#if loaded}
|
||||||
|
<div
|
||||||
|
in:fade={fadeOption || {}}
|
||||||
|
class={contentClass}
|
||||||
|
style={contentStyle}
|
||||||
|
>
|
||||||
|
<slot>Lazy load content</slot>
|
||||||
|
</div>
|
||||||
|
{#if !contentShow && placeholder}
|
||||||
|
<Placeholder {placeholder} {placeholderProps} />
|
||||||
|
{/if}
|
||||||
|
{:else if placeholder}
|
||||||
|
<Placeholder {placeholder} {placeholderProps} />
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
// https://github.com/leafOfTree/svelte-lazy
|
||||||
|
import { fade } from 'svelte/transition';
|
||||||
|
import Placeholder from './Placeholder.svelte';
|
||||||
|
export let keep = false;
|
||||||
|
export let height = 0;
|
||||||
|
export let offset = 150;
|
||||||
|
export let fadeOption = {
|
||||||
|
delay: 0,
|
||||||
|
duration: 400,
|
||||||
|
};
|
||||||
|
export let resetHeightDelay = 0;
|
||||||
|
export let onload = null;
|
||||||
|
export let placeholder = null;
|
||||||
|
export let placeholderProps = null;
|
||||||
|
let className = '';
|
||||||
|
export { className as class };
|
||||||
|
|
||||||
|
const rootClass = 'svelte-lazy'
|
||||||
|
+ (className ? ' ' + className : '');
|
||||||
|
const contentClass = 'svelte-lazy-content';
|
||||||
|
const rootInitialHeight = getStyleHeight();
|
||||||
|
let loaded = false;
|
||||||
|
|
||||||
|
let contentShow = true;
|
||||||
|
$: contentStyle = !contentShow ? 'display: none' : '';
|
||||||
|
|
||||||
|
function load(node) {
|
||||||
|
setHeight(node);
|
||||||
|
const handler = createHandler(node);
|
||||||
|
addListeners(handler);
|
||||||
|
setTimeout(() => {
|
||||||
|
handler();
|
||||||
|
});
|
||||||
|
const observer = observeNode(node, handler);
|
||||||
|
|
||||||
|
return {
|
||||||
|
destroy: () => {
|
||||||
|
removeListeners(handler);
|
||||||
|
observer.unobserve(node);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function createHandler(node) {
|
||||||
|
const handler = throttle(e => {
|
||||||
|
const nodeTop = node.getBoundingClientRect().top;
|
||||||
|
const nodeBottom = node.getBoundingClientRect().bottom;
|
||||||
|
const expectedTop = getContainerHeight(e) + offset;
|
||||||
|
|
||||||
|
if (nodeTop <= expectedTop && nodeBottom > 0) {
|
||||||
|
loadNode(node);
|
||||||
|
} else if (!keep) {
|
||||||
|
unload(node)
|
||||||
|
}
|
||||||
|
}, 200);
|
||||||
|
return handler;
|
||||||
|
}
|
||||||
|
|
||||||
|
function observeNode(node, handler) {
|
||||||
|
const observer = new IntersectionObserver((entries) => {
|
||||||
|
if (entries[0].isIntersecting) {
|
||||||
|
loadNode(node);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
observer.observe(node);
|
||||||
|
return observer;
|
||||||
|
}
|
||||||
|
|
||||||
|
function unload(node) {
|
||||||
|
setHeight(node);
|
||||||
|
loaded = false
|
||||||
|
}
|
||||||
|
|
||||||
|
function loadNode(node, handler) {
|
||||||
|
if (loaded) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
loaded = true;
|
||||||
|
resetHeight(node);
|
||||||
|
if (onload) {
|
||||||
|
onload(node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function addListeners(handler) {
|
||||||
|
document.addEventListener('scroll', handler, true);
|
||||||
|
window.addEventListener('resize', handler);
|
||||||
|
}
|
||||||
|
|
||||||
|
function removeListeners(handler) {
|
||||||
|
document.removeEventListener('scroll', handler, true);
|
||||||
|
window.removeEventListener('resize', handler);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getStyleHeight() {
|
||||||
|
return (typeof height === 'number')
|
||||||
|
? height + 'px'
|
||||||
|
: height;
|
||||||
|
}
|
||||||
|
|
||||||
|
function setHeight(node) {
|
||||||
|
if (height) {
|
||||||
|
node.style.height = getStyleHeight();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function resetHeight(node) {
|
||||||
|
setTimeout(() => {
|
||||||
|
const isLoading = checkImgLoadingStatus(node);
|
||||||
|
if (!isLoading) {
|
||||||
|
node.style.height = 'auto';
|
||||||
|
}
|
||||||
|
// Add a delay to wait for remote resources like images to load
|
||||||
|
}, resetHeightDelay);
|
||||||
|
}
|
||||||
|
|
||||||
|
function checkImgLoadingStatus(node) {
|
||||||
|
const img = node.querySelector('img');
|
||||||
|
if (!img) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!img.complete) {
|
||||||
|
contentShow = false;
|
||||||
|
|
||||||
|
node.addEventListener('load', () => {
|
||||||
|
// Use auto height if loading successfully
|
||||||
|
contentShow = true;
|
||||||
|
node.style.height = 'auto';
|
||||||
|
}, { capture: true, once: true });
|
||||||
|
|
||||||
|
node.addEventListener('error', () => {
|
||||||
|
// Show content with fixed height if there is error
|
||||||
|
contentShow = true;
|
||||||
|
}, { capture: true, once: true });
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (img.naturalHeight === 0) {
|
||||||
|
// Use fixed height if img has zero height
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getContainerHeight(e) {
|
||||||
|
if (e?.target?.getBoundingClientRect) {
|
||||||
|
return e.target.getBoundingClientRect().bottom;
|
||||||
|
} else {
|
||||||
|
return window.innerHeight;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// From underscore souce code
|
||||||
|
function throttle(func, wait, options) {
|
||||||
|
let context, args, result;
|
||||||
|
let timeout = null;
|
||||||
|
let previous = 0;
|
||||||
|
if (!options) options = {};
|
||||||
|
const later = function() {
|
||||||
|
previous = options.leading === false ? 0 : new Date();
|
||||||
|
timeout = null;
|
||||||
|
result = func.apply(context, args);
|
||||||
|
if (!timeout) context = args = null;
|
||||||
|
};
|
||||||
|
|
||||||
|
return function(event) {
|
||||||
|
const now = new Date();
|
||||||
|
if (!previous && options.leading === false) previous = now;
|
||||||
|
const remaining = wait - (now - previous);
|
||||||
|
context = this;
|
||||||
|
args = arguments;
|
||||||
|
if (remaining <= 0 || remaining > wait) {
|
||||||
|
if (timeout) {
|
||||||
|
clearTimeout(timeout);
|
||||||
|
timeout = null;
|
||||||
|
}
|
||||||
|
previous = now;
|
||||||
|
result = func.apply(context, args);
|
||||||
|
if (!timeout) context = args = null;
|
||||||
|
} else if (!timeout && options.trailing !== false) {
|
||||||
|
timeout = setTimeout(later, remaining);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
</script>
|
15
src/components/lazy-loader/Placeholder.svelte
Normal file
15
src/components/lazy-loader/Placeholder.svelte
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
{#if placeholder}
|
||||||
|
<div class={placeholderClass}>
|
||||||
|
{#if typeof placeholder === 'string'}
|
||||||
|
<div>{placeholder}</div>
|
||||||
|
{:else if ['function', 'object'].includes(typeof placeholder)}
|
||||||
|
<svelte:component this={placeholder} {...placeholderProps} />
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
|
||||||
|
<script>
|
||||||
|
export let placeholder = null;
|
||||||
|
export let placeholderProps = null;
|
||||||
|
const placeholderClass = 'svelte-lazy-placeholder';
|
||||||
|
</script>
|
223
src/components/modals.ts
Normal file
223
src/components/modals.ts
Normal file
|
@ -0,0 +1,223 @@
|
||||||
|
import { MarkdownView, Modal, TFile } from 'obsidian'
|
||||||
|
import type { Modifier } from 'obsidian'
|
||||||
|
import ModalVault from './ModalVault.svelte'
|
||||||
|
import ModalInFile from './ModalInFile.svelte'
|
||||||
|
import { Action, eventBus, EventNames, isInputComposition } from '../globals'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import { mount, unmount } from 'svelte'
|
||||||
|
|
||||||
|
abstract class LocatorModal extends Modal {
|
||||||
|
protected constructor(plugin: LocatorPlugin) {
|
||||||
|
super(plugin.app)
|
||||||
|
const settings = plugin.settings
|
||||||
|
|
||||||
|
// Remove all the default modal's children
|
||||||
|
// so that we can more easily customize it
|
||||||
|
// const closeEl = this.containerEl.find('.modal-close-button')
|
||||||
|
this.modalEl.replaceChildren()
|
||||||
|
// this.modalEl.append(closeEl)
|
||||||
|
this.modalEl.addClass('locator-modal', 'prompt')
|
||||||
|
this.modalEl.removeClass('modal')
|
||||||
|
this.modalEl.tabIndex = -1
|
||||||
|
|
||||||
|
// Setup events that can be listened through the event bus
|
||||||
|
|
||||||
|
// #region Up/Down navigation
|
||||||
|
|
||||||
|
this.scope.register([], 'ArrowDown', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.ArrowDown)
|
||||||
|
})
|
||||||
|
this.scope.register([], 'ArrowUp', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.ArrowUp)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Ctrl+j/k
|
||||||
|
for (const key of [
|
||||||
|
{ k: 'J', dir: 'down' },
|
||||||
|
{ k: 'K', dir: 'up' },
|
||||||
|
] as const) {
|
||||||
|
for (const modifier of ['Ctrl', 'Mod'] as const) {
|
||||||
|
this.scope.register([modifier], key.k, _e => {
|
||||||
|
if (settings.vimLikeNavigationShortcut) {
|
||||||
|
// e.preventDefault()
|
||||||
|
eventBus.emit('arrow-' + key.dir)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ctrl+n/p
|
||||||
|
for (const key of [
|
||||||
|
{ k: 'N', dir: 'down' },
|
||||||
|
{ k: 'P', dir: 'up' },
|
||||||
|
] as const) {
|
||||||
|
for (const modifier of ['Ctrl', 'Mod'] as const) {
|
||||||
|
this.scope.register([modifier], key.k, _e => {
|
||||||
|
if (settings.vimLikeNavigationShortcut) {
|
||||||
|
// e.preventDefault()
|
||||||
|
eventBus.emit('arrow-' + key.dir)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// #endregion Up/Down navigation
|
||||||
|
|
||||||
|
let openInCurrentPaneKey: Modifier[]
|
||||||
|
let openInNewPaneKey: Modifier[]
|
||||||
|
let createInCurrentPaneKey: Modifier[]
|
||||||
|
let createInNewPaneKey: Modifier[]
|
||||||
|
let openInNewLeafKey: Modifier[] = ['Mod', 'Alt']
|
||||||
|
if (settings.openInNewPane) {
|
||||||
|
openInCurrentPaneKey = ['Mod']
|
||||||
|
openInNewPaneKey = []
|
||||||
|
createInCurrentPaneKey = ['Mod', 'Shift']
|
||||||
|
createInNewPaneKey = ['Shift']
|
||||||
|
} else {
|
||||||
|
openInCurrentPaneKey = []
|
||||||
|
openInNewPaneKey = ['Mod']
|
||||||
|
createInCurrentPaneKey = ['Shift']
|
||||||
|
createInNewPaneKey = ['Mod', 'Shift']
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open in new pane
|
||||||
|
this.scope.register(openInNewPaneKey, 'Enter', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.OpenInNewPane)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Open in a new leaf
|
||||||
|
this.scope.register(openInNewLeafKey, 'Enter', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.OpenInNewLeaf)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Insert link
|
||||||
|
this.scope.register(['Alt'], 'Enter', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.InsertLink)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Create a new note
|
||||||
|
this.scope.register(createInCurrentPaneKey, 'Enter', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.CreateNote)
|
||||||
|
})
|
||||||
|
this.scope.register(createInNewPaneKey, 'Enter', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.CreateNote, { newLeaf: true })
|
||||||
|
})
|
||||||
|
|
||||||
|
// Open in current pane
|
||||||
|
this.scope.register(openInCurrentPaneKey, 'Enter', e => {
|
||||||
|
if (!isInputComposition()) {
|
||||||
|
// Check if the user is still typing
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.Enter)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
// Open in background
|
||||||
|
this.scope.register(['Mod'], 'O', e => {
|
||||||
|
if (!isInputComposition()) {
|
||||||
|
// Check if the user is still typing
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.OpenInBackground)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
this.scope.register([], 'Tab', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.Tab) // Switch context
|
||||||
|
})
|
||||||
|
|
||||||
|
// Search history
|
||||||
|
this.scope.register(['Alt'], 'ArrowDown', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.NextSearchHistory)
|
||||||
|
})
|
||||||
|
this.scope.register(['Alt'], 'ArrowUp', e => {
|
||||||
|
e.preventDefault()
|
||||||
|
eventBus.emit(Action.PrevSearchHistory)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Context
|
||||||
|
this.scope.register(['Mod'], 'G', _e => {
|
||||||
|
eventBus.emit(EventNames.ToggleExcerpts)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Vault-wide search modal.
 * Mounts the `ModalVault` Svelte component inside the modal element and
 * pre-fills the query from (in order of priority): the explicit `query`
 * argument, the current editor selection, or the previous search
 * (when enabled in settings).
 */
export class LocatorVaultModal extends LocatorModal {
  /**
   * Instantiate the Locator vault modal
   * @param plugin
   * @param query The query to pre-fill the search field with
   */
  constructor(plugin: LocatorPlugin, query?: string) {
    super(plugin)

    // Selected text in the editor
    const selectedText = plugin.app.workspace
      .getActiveViewOfType(MarkdownView)
      ?.editor.getSelection()

    plugin.searchHistory.getHistory().then(history => {
      // Previously searched query (if enabled in settings)
      const previous = plugin.settings.showPreviousQueryResults
        ? history[0]
        : null

      // Instantiate and display the Svelte component
      const cmp = mount(ModalVault, {
        target: this.modalEl,
        props: {
          plugin,
          modal: this,
          previousQuery: query || selectedText || previous || '',
        },
      })

      // NOTE(review): onClose is only assigned once the history promise
      // resolves; closing the modal before that point skips the unmount
      // (the component doesn't exist yet at that time anyway).
      this.onClose = () => {
        // Since the component is manually created,
        // we also need to manually destroy it
        unmount(cmp)
      }
    })
  }
}
|
||||||
|
|
||||||
|
export class LocatorInFileModal extends LocatorModal {
|
||||||
|
constructor(
|
||||||
|
plugin: LocatorPlugin,
|
||||||
|
file: TFile,
|
||||||
|
searchQuery: string = '',
|
||||||
|
parent?: LocatorModal
|
||||||
|
) {
|
||||||
|
super(plugin)
|
||||||
|
|
||||||
|
const cmp = mount(ModalInFile, {
|
||||||
|
target: this.modalEl,
|
||||||
|
props: {
|
||||||
|
plugin,
|
||||||
|
modal: this,
|
||||||
|
singleFilePath: file.path,
|
||||||
|
parent: parent,
|
||||||
|
previousQuery: searchQuery,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (parent) {
|
||||||
|
// Hide the parent vault modal, and show it back when this one is closed
|
||||||
|
parent.containerEl.toggleVisibility(false)
|
||||||
|
}
|
||||||
|
this.onClose = () => {
|
||||||
|
if (parent) {
|
||||||
|
parent.containerEl.toggleVisibility(true)
|
||||||
|
}
|
||||||
|
unmount(cmp)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
91
src/database.ts
Normal file
91
src/database.ts
Normal file
|
@ -0,0 +1,91 @@
|
||||||
|
import Dexie from 'dexie'
|
||||||
|
import type { AsPlainObject } from 'minisearch'
|
||||||
|
import type { DocumentRef } from './globals'
|
||||||
|
import { Notice } from 'obsidian'
|
||||||
|
import type LocatorPlugin from './main'
|
||||||
|
|
||||||
|
export class Database extends Dexie {
|
||||||
|
public static readonly dbVersion = 10
|
||||||
|
searchHistory!: Dexie.Table<{ id?: number; query: string }, number>
|
||||||
|
minisearch!: Dexie.Table<
|
||||||
|
{
|
||||||
|
date: string
|
||||||
|
paths: DocumentRef[]
|
||||||
|
data: AsPlainObject
|
||||||
|
},
|
||||||
|
string
|
||||||
|
>
|
||||||
|
embeds!: Dexie.Table<{ embedded: string; referencedBy: string[] }, string>
|
||||||
|
|
||||||
|
constructor(private plugin: LocatorPlugin) {
|
||||||
|
super(Database.getDbName(plugin.app.appId))
|
||||||
|
// Database structure
|
||||||
|
this.version(Database.dbVersion).stores({
|
||||||
|
searchHistory: '++id',
|
||||||
|
minisearch: 'date',
|
||||||
|
embeds: 'embedded',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
private static getDbName(appId: string) {
|
||||||
|
return 'locator/cache/' + appId
|
||||||
|
}
|
||||||
|
|
||||||
|
//#endregion Table declarations
|
||||||
|
|
||||||
|
public async getMinisearchCache(): Promise<{
|
||||||
|
paths: DocumentRef[]
|
||||||
|
data: AsPlainObject
|
||||||
|
} | null> {
|
||||||
|
try {
|
||||||
|
const cachedIndex = (await this.plugin.database.minisearch.toArray())[0]
|
||||||
|
return cachedIndex
|
||||||
|
} catch (e) {
|
||||||
|
new Notice(
|
||||||
|
'Locator - Cache missing or invalid. Some freezes may occur while Locator indexes your vault.'
|
||||||
|
)
|
||||||
|
console.error('Locator - Error while loading Minisearch cache')
|
||||||
|
console.error(e)
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async writeMinisearchCache(): Promise<void> {
|
||||||
|
const minisearchJson = this.plugin.searchEngine.getSerializedMiniSearch()
|
||||||
|
const paths = this.plugin.searchEngine.getSerializedIndexedDocuments()
|
||||||
|
const database = this.plugin.database
|
||||||
|
await database.minisearch.clear()
|
||||||
|
await database.minisearch.add({
|
||||||
|
date: new Date().toISOString(),
|
||||||
|
paths,
|
||||||
|
data: minisearchJson,
|
||||||
|
})
|
||||||
|
console.debug('Locator - Search cache written')
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deletes Locator databases that have an older version than the current one
|
||||||
|
*/
|
||||||
|
public async clearOldDatabases(): Promise<void> {
|
||||||
|
const toDelete = (await indexedDB.databases()).filter(
|
||||||
|
db =>
|
||||||
|
db.name === Database.getDbName(this.plugin.app.appId) &&
|
||||||
|
// version multiplied by 10 https://github.com/dexie/Dexie.js/issues/59
|
||||||
|
db.version !== Database.dbVersion * 10
|
||||||
|
)
|
||||||
|
if (toDelete.length) {
|
||||||
|
console.debug('Locator - Those IndexedDb databases will be deleted:')
|
||||||
|
for (const db of toDelete) {
|
||||||
|
if (db.name) {
|
||||||
|
indexedDB.deleteDatabase(db.name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async clearCache() {
|
||||||
|
await this.minisearch.clear()
|
||||||
|
await this.embeds.clear()
|
||||||
|
new Notice('Locator - Cache cleared. Please restart Obsidian.')
|
||||||
|
}
|
||||||
|
}
|
119
src/globals.ts
Normal file
119
src/globals.ts
Normal file
|
@ -0,0 +1,119 @@
|
||||||
|
import { EventBus } from './tools/event-bus'
|
||||||
|
import { writable } from 'svelte/store'
|
||||||
|
import type { TFile } from 'obsidian'
|
||||||
|
|
||||||
|
export const regexLineSplit = /\r?\n|\r|((\.|\?|!)( |\r?\n|\r))/g
|
||||||
|
export const regexYaml = /^---\s*\n(.*?)\n?^---\s?/ms
|
||||||
|
export const regexStripQuotes = /^"|"$|^'|'$/g
|
||||||
|
export const chsRegex = /[\u4e00-\u9fa5]/
|
||||||
|
export const regexExtensions = /(?:^|\s)\.(\w+)/g
|
||||||
|
|
||||||
|
export const excerptBefore = 100
|
||||||
|
export const excerptAfter = 300
|
||||||
|
|
||||||
|
export const K_DISABLE_OMNISEARCH = 'locator-disabled'
|
||||||
|
|
||||||
|
export const eventBus = new EventBus()
|
||||||
|
|
||||||
|
export const EventNames = {
|
||||||
|
ToggleExcerpts: 'toggle-excerpts',
|
||||||
|
} as const
|
||||||
|
|
||||||
|
export const enum IndexingStepType {
|
||||||
|
Done,
|
||||||
|
LoadingCache,
|
||||||
|
ReadingFiles,
|
||||||
|
IndexingFiles,
|
||||||
|
WritingCache,
|
||||||
|
}
|
||||||
|
|
||||||
|
export const enum Action {
|
||||||
|
Enter = 'enter',
|
||||||
|
OpenInBackground = 'open-in-background',
|
||||||
|
CreateNote = 'create-note',
|
||||||
|
OpenInNewPane = 'open-in-new-pane',
|
||||||
|
InsertLink = 'insert-link',
|
||||||
|
Tab = 'tab',
|
||||||
|
ArrowUp = 'arrow-up',
|
||||||
|
ArrowDown = 'arrow-down',
|
||||||
|
PrevSearchHistory = 'prev-search-history',
|
||||||
|
NextSearchHistory = 'next-search-history',
|
||||||
|
OpenInNewLeaf = 'open-in-new-leaf',
|
||||||
|
}
|
||||||
|
|
||||||
|
export const enum RecencyCutoff {
|
||||||
|
Disabled = '0',
|
||||||
|
Day = '1',
|
||||||
|
Week = '2',
|
||||||
|
Month = '3',
|
||||||
|
}
|
||||||
|
|
||||||
|
export type DocumentRef = { path: string; mtime: number }
|
||||||
|
|
||||||
|
export type IndexedDocument = {
|
||||||
|
path: string
|
||||||
|
basename: string
|
||||||
|
displayTitle: string
|
||||||
|
mtime: number
|
||||||
|
|
||||||
|
content: string
|
||||||
|
cleanedContent: string
|
||||||
|
aliases: string
|
||||||
|
tags: string[]
|
||||||
|
unmarkedTags: string[]
|
||||||
|
headings1: string
|
||||||
|
headings2: string
|
||||||
|
headings3: string
|
||||||
|
|
||||||
|
// TODO: reimplement this
|
||||||
|
doesNotExist?: boolean
|
||||||
|
parent?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export type SearchMatch = {
|
||||||
|
match: string
|
||||||
|
offset: number
|
||||||
|
}
|
||||||
|
export const isSearchMatch = (o: { offset?: number }): o is SearchMatch => {
|
||||||
|
return o.offset !== undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
export const indexingStep = writable(IndexingStepType.Done)
|
||||||
|
|
||||||
|
export type ResultNote = {
|
||||||
|
score: number
|
||||||
|
path: string
|
||||||
|
basename: string
|
||||||
|
displayTitle: string
|
||||||
|
content: string
|
||||||
|
foundWords: string[]
|
||||||
|
matches: SearchMatch[]
|
||||||
|
isEmbed: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
let inComposition = false
|
||||||
|
|
||||||
|
/**
 * Records whether the user is currently in an IME composition session
 * (module-level `inComposition` flag, read back by isInputComposition()).
 * @param toggle true while composition is ongoing
 */
export function toggleInputComposition(toggle: boolean): void {
  inComposition = toggle
}
|
||||||
|
|
||||||
|
/**
 * @returns true while the user is composing text through an IME,
 * as last set by toggleInputComposition()
 */
export function isInputComposition(): boolean {
  return inComposition
}
|
||||||
|
|
||||||
|
export type TextExtractorApi = {
|
||||||
|
extractText: (file: TFile) => Promise<string>
|
||||||
|
canFileBeExtracted: (filePath: string) => boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
export type AIImageAnalyzerAPI = {
|
||||||
|
analyzeImage: (file: TFile) => Promise<string>
|
||||||
|
canBeAnalyzed: (file: TFile) => boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
export const SEPARATORS =
|
||||||
|
/[|\t\n\r\^"= -#%-*,.`\/<>:;?@[-\]_{}\u00A0\u00A1\u00A7\u00AB\u00B6\u00B7\u00BB\u00BF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C77\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166E\u1680\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2000-\u200A\u2010-\u2029\u202F-\u2043\u2045-\u2051\u2053-\u205F\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4F\u3000-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]/
|
||||||
|
.toString()
|
||||||
|
.slice(1, -1)
|
||||||
|
export const SPACE_OR_PUNCTUATION = new RegExp(`${SEPARATORS}+`, 'u')
|
||||||
|
export const BRACKETS_AND_SPACE = /[|\[\]\(\)<>\{\} \t\n\r]/u
|
339
src/main.ts
Normal file
339
src/main.ts
Normal file
|
@ -0,0 +1,339 @@
|
||||||
|
import {
|
||||||
|
App,
|
||||||
|
Notice,
|
||||||
|
Platform,
|
||||||
|
Plugin,
|
||||||
|
type PluginManifest,
|
||||||
|
TFile,
|
||||||
|
} from 'obsidian'
|
||||||
|
import {
|
||||||
|
LocatorInFileModal,
|
||||||
|
LocatorVaultModal,
|
||||||
|
} from './components/modals'
|
||||||
|
import {
|
||||||
|
getDefaultSettings,
|
||||||
|
loadSettings,
|
||||||
|
SettingsTab,
|
||||||
|
showExcerpt,
|
||||||
|
} from './settings'
|
||||||
|
import type { LocatorSettings } from './settings/utils'
|
||||||
|
import { isCacheEnabled } from './settings/utils'
|
||||||
|
import { saveSettings } from './settings/utils'
|
||||||
|
import { isPluginDisabled } from './settings/utils'
|
||||||
|
import {
|
||||||
|
eventBus,
|
||||||
|
EventNames,
|
||||||
|
indexingStep,
|
||||||
|
IndexingStepType,
|
||||||
|
type TextExtractorApi,
|
||||||
|
type AIImageAnalyzerAPI,
|
||||||
|
} from './globals'
|
||||||
|
import { notifyOnIndexed, registerAPI } from './tools/api'
|
||||||
|
import { Database } from './database'
|
||||||
|
import { SearchEngine } from './search/search-engine'
|
||||||
|
import { DocumentsRepository } from './repositories/documents-repository'
|
||||||
|
import { logVerbose } from './tools/utils'
|
||||||
|
import { NotesIndexer } from './notes-indexer'
|
||||||
|
import { TextProcessor } from './tools/text-processing'
|
||||||
|
import { EmbedsRepository } from './repositories/embeds-repository'
|
||||||
|
import { SearchHistory } from './search/search-history'
|
||||||
|
|
||||||
|
export default class LocatorPlugin extends Plugin {
|
||||||
|
// FIXME: fix the type
|
||||||
|
public apiHttpServer: null | any = null
|
||||||
|
public settings: LocatorSettings = getDefaultSettings(this.app)
|
||||||
|
|
||||||
|
public readonly documentsRepository: DocumentsRepository
|
||||||
|
public readonly embedsRepository = new EmbedsRepository(this)
|
||||||
|
public readonly database = new Database(this)
|
||||||
|
|
||||||
|
public readonly notesIndexer = new NotesIndexer(this)
|
||||||
|
public readonly textProcessor = new TextProcessor(this)
|
||||||
|
public readonly searchEngine = new SearchEngine(this)
|
||||||
|
public readonly searchHistory = new SearchHistory(this)
|
||||||
|
|
||||||
|
private ribbonButton?: HTMLElement
|
||||||
|
private refreshIndexCallback?: (ev: FocusEvent) => any
|
||||||
|
|
||||||
|
constructor(app: App, manifest: PluginManifest) {
|
||||||
|
super(app, manifest)
|
||||||
|
this.documentsRepository = new DocumentsRepository(this)
|
||||||
|
}
|
||||||
|
|
||||||
|
async onload(): Promise<void> {
|
||||||
|
this.settings = await loadSettings(this)
|
||||||
|
this.addSettingTab(new SettingsTab(this))
|
||||||
|
|
||||||
|
if (!Platform.isMobile) {
|
||||||
|
import('./tools/api-server').then(
|
||||||
|
m => (this.apiHttpServer = m.getServer(this))
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isPluginDisabled(this.app)) {
|
||||||
|
console.debug('Plugin disabled')
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
await cleanOldCacheFiles(this.app)
|
||||||
|
await this.database.clearOldDatabases()
|
||||||
|
|
||||||
|
registerAPI(this)
|
||||||
|
|
||||||
|
const settings = this.settings
|
||||||
|
if (settings.ribbonIcon) {
|
||||||
|
this.addRibbonButton()
|
||||||
|
}
|
||||||
|
|
||||||
|
eventBus.disable('vault')
|
||||||
|
eventBus.disable('infile')
|
||||||
|
eventBus.on('global', EventNames.ToggleExcerpts, () => {
|
||||||
|
showExcerpt.set(!settings.showExcerpt)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Commands to display Locator modals
|
||||||
|
this.addCommand({
|
||||||
|
id: 'show-modal',
|
||||||
|
name: 'Vault search',
|
||||||
|
callback: () => {
|
||||||
|
new LocatorVaultModal(this).open()
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
this.addCommand({
|
||||||
|
id: 'show-modal-infile',
|
||||||
|
name: 'In-file search',
|
||||||
|
editorCallback: (_editor, view) => {
|
||||||
|
if (view.file) {
|
||||||
|
new LocatorInFileModal(this, view.file).open()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const searchEngine = this.searchEngine
|
||||||
|
|
||||||
|
this.app.workspace.onLayoutReady(async () => {
|
||||||
|
// Listeners to keep the search index up-to-date
|
||||||
|
this.registerEvent(
|
||||||
|
this.app.vault.on('create', file => {
|
||||||
|
if (!(file instanceof TFile)) return
|
||||||
|
if (this.notesIndexer.isFileIndexable(file.path)) {
|
||||||
|
logVerbose('Indexing new file', file.path)
|
||||||
|
searchEngine.addFromPaths([file.path])
|
||||||
|
this.embedsRepository.refreshEmbedsForNote(file.path)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
)
|
||||||
|
this.registerEvent(
|
||||||
|
this.app.vault.on('delete', file => {
|
||||||
|
if (!(file instanceof TFile)) return
|
||||||
|
logVerbose('Removing file', file.path)
|
||||||
|
this.documentsRepository.removeDocument(file.path)
|
||||||
|
searchEngine.removeFromPaths([file.path])
|
||||||
|
this.embedsRepository.removeFile(file.path)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
this.registerEvent(
|
||||||
|
this.app.vault.on('modify', async file => {
|
||||||
|
if (!(file instanceof TFile)) return
|
||||||
|
if (this.notesIndexer.isFileIndexable(file.path)) {
|
||||||
|
this.notesIndexer.flagNoteForReindex(file)
|
||||||
|
}
|
||||||
|
this.embedsRepository.refreshEmbedsForNote(file.path)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
this.registerEvent(
|
||||||
|
this.app.vault.on('rename', async (file, oldPath) => {
|
||||||
|
if (!(file instanceof TFile)) return
|
||||||
|
if (this.notesIndexer.isFileIndexable(file.path)) {
|
||||||
|
logVerbose('Renaming file', file.path)
|
||||||
|
this.documentsRepository.removeDocument(oldPath)
|
||||||
|
await this.documentsRepository.addDocument(file.path)
|
||||||
|
|
||||||
|
searchEngine.removeFromPaths([oldPath])
|
||||||
|
await searchEngine.addFromPaths([file.path])
|
||||||
|
|
||||||
|
this.embedsRepository.renameFile(oldPath, file.path)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
this.refreshIndexCallback = this.notesIndexer.refreshIndex.bind(
|
||||||
|
this.notesIndexer
|
||||||
|
)
|
||||||
|
addEventListener('blur', this.refreshIndexCallback!)
|
||||||
|
removeEventListener
|
||||||
|
|
||||||
|
await this.executeFirstLaunchTasks()
|
||||||
|
await this.populateIndex()
|
||||||
|
|
||||||
|
if (this.apiHttpServer && settings.httpApiEnabled) {
|
||||||
|
this.apiHttpServer.listen(settings.httpApiPort)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async executeFirstLaunchTasks(): Promise<void> {
|
||||||
|
const code = '1.21.0'
|
||||||
|
// if (settings.welcomeMessage !== code && getTextExtractor()) {
|
||||||
|
// const welcome = new DocumentFragment()
|
||||||
|
// welcome.createSpan({}, span => {
|
||||||
|
// span.innerHTML = `🔎 Locator can now index .docx and .xlsx documents. Don't forget to update Text Extractor and enable the toggle in Locator settings.`
|
||||||
|
// })
|
||||||
|
// new Notice(welcome, 20_000)
|
||||||
|
// }
|
||||||
|
this.settings.welcomeMessage = code
|
||||||
|
await this.saveData(this.settings)
|
||||||
|
}
|
||||||
|
|
||||||
|
async onunload(): Promise<void> {
|
||||||
|
// @ts-ignore
|
||||||
|
delete globalThis['locator']
|
||||||
|
|
||||||
|
if (this.refreshIndexCallback) {
|
||||||
|
removeEventListener('blur', this.refreshIndexCallback)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear cache when disabling Locator
|
||||||
|
if (process.env.NODE_ENV === 'production') {
|
||||||
|
await this.database.clearCache()
|
||||||
|
}
|
||||||
|
this.apiHttpServer.close()
|
||||||
|
}
|
||||||
|
|
||||||
|
addRibbonButton(): void {
|
||||||
|
this.ribbonButton = this.addRibbonIcon('search', 'Locator', _evt => {
|
||||||
|
new LocatorVaultModal(this).open()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
removeRibbonButton(): void {
|
||||||
|
if (this.ribbonButton) {
|
||||||
|
this.ribbonButton.parentNode?.removeChild(this.ribbonButton)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Plugin dependency - Chs Patch for Chinese word segmentation
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public getChsSegmenter(): any | undefined {
|
||||||
|
return (this.app as any).plugins.plugins['cm-chs-patch']
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Plugin dependency - Text Extractor
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public getTextExtractor(): TextExtractorApi | undefined {
|
||||||
|
return (this.app as any).plugins?.plugins?.['text-extractor']?.api
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Plugin dependency - Ai Image Analyzer
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public getAIImageAnalyzer(): AIImageAnalyzerAPI | undefined {
|
||||||
|
return (this.app as any).plugins?.plugins?.['ai-image-analyzer']?.api
|
||||||
|
}
|
||||||
|
|
||||||
|
private async populateIndex(): Promise<void> {
|
||||||
|
console.time('Indexing total time')
|
||||||
|
indexingStep.set(IndexingStepType.ReadingFiles)
|
||||||
|
const files = this.app.vault
|
||||||
|
.getFiles()
|
||||||
|
.filter(f => this.notesIndexer.isFileIndexable(f.path))
|
||||||
|
console.debug(`${files.length} files total`)
|
||||||
|
console.debug(`Cache is ${isCacheEnabled() ? 'enabled' : 'disabled'}`)
|
||||||
|
// Map documents in the background
|
||||||
|
// Promise.all(files.map(f => cacheManager.addToLiveCache(f.path)))
|
||||||
|
|
||||||
|
const searchEngine = this.searchEngine
|
||||||
|
if (isCacheEnabled()) {
|
||||||
|
console.time('Loading index from cache')
|
||||||
|
indexingStep.set(IndexingStepType.LoadingCache)
|
||||||
|
const hasCache = await searchEngine.loadCache()
|
||||||
|
if (hasCache) {
|
||||||
|
console.timeEnd('Loading index from cache')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const diff = searchEngine.getDocumentsToReindex(
|
||||||
|
files.map(f => ({ path: f.path, mtime: f.stat.mtime }))
|
||||||
|
)
|
||||||
|
|
||||||
|
if (isCacheEnabled()) {
|
||||||
|
if (diff.toAdd.length) {
|
||||||
|
console.debug(
|
||||||
|
'Total number of files to add/update: ' + diff.toAdd.length
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (diff.toRemove.length) {
|
||||||
|
console.debug(
|
||||||
|
'Total number of files to remove: ' + diff.toRemove.length
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (diff.toAdd.length >= 1000 && isCacheEnabled()) {
|
||||||
|
new Notice(
|
||||||
|
`${diff.toAdd.length} files need to be indexed. Obsidian may experience stutters and freezes during the process`,
|
||||||
|
10_000
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
indexingStep.set(IndexingStepType.IndexingFiles)
|
||||||
|
searchEngine.removeFromPaths(diff.toRemove.map(o => o.path))
|
||||||
|
await searchEngine.addFromPaths(diff.toAdd.map(o => o.path))
|
||||||
|
|
||||||
|
if ((diff.toRemove.length || diff.toAdd.length) && isCacheEnabled()) {
|
||||||
|
indexingStep.set(IndexingStepType.WritingCache)
|
||||||
|
|
||||||
|
// Disable settings.useCache while writing the cache, in case it freezes
|
||||||
|
const cacheEnabled = this.settings.useCache
|
||||||
|
if (cacheEnabled && !this.settings.DANGER_forceSaveCache) {
|
||||||
|
this.settings.useCache = false
|
||||||
|
await saveSettings(this)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write the cache
|
||||||
|
await this.database.writeMinisearchCache()
|
||||||
|
await this.embedsRepository.writeToCache()
|
||||||
|
|
||||||
|
// Re-enable settings.caching
|
||||||
|
if (cacheEnabled) {
|
||||||
|
this.settings.useCache = true
|
||||||
|
await saveSettings(this)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.timeEnd('Indexing total time')
|
||||||
|
if (diff.toAdd.length >= 1000 && isCacheEnabled()) {
|
||||||
|
new Notice(`Your files have been indexed.`)
|
||||||
|
}
|
||||||
|
indexingStep.set(IndexingStepType.Done)
|
||||||
|
notifyOnIndexed()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read the files and feed them to Minisearch
|
||||||
|
*/
|
||||||
|
|
||||||
|
async function cleanOldCacheFiles(app: App) {
|
||||||
|
const toDelete = [
|
||||||
|
`${app.vault.configDir}/plugins/locator/searchIndex.json`,
|
||||||
|
`${app.vault.configDir}/plugins/locator/notesCache.json`,
|
||||||
|
`${app.vault.configDir}/plugins/locator/notesCache.data`,
|
||||||
|
`${app.vault.configDir}/plugins/locator/searchIndex.data`,
|
||||||
|
`${app.vault.configDir}/plugins/locator/historyCache.json`,
|
||||||
|
`${app.vault.configDir}/plugins/locator/pdfCache.data`,
|
||||||
|
]
|
||||||
|
for (const item of toDelete) {
|
||||||
|
if (await app.vault.adapter.exists(item)) {
|
||||||
|
try {
|
||||||
|
await app.vault.adapter.remove(item)
|
||||||
|
} catch (e) {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
116
src/notes-indexer.ts
Normal file
116
src/notes-indexer.ts
Normal file
|
@ -0,0 +1,116 @@
|
||||||
|
import type { TAbstractFile } from 'obsidian'
|
||||||
|
import type LocatorPlugin from './main'
|
||||||
|
import { removeAnchors } from './tools/notes'
|
||||||
|
import type { IndexedDocument } from './globals'
|
||||||
|
import {
|
||||||
|
isFileCanvas,
|
||||||
|
isFileFromDataloom,
|
||||||
|
isFileImage,
|
||||||
|
isFilePDF,
|
||||||
|
logVerbose,
|
||||||
|
} from './tools/utils'
|
||||||
|
|
||||||
|
/**
 * Decides which vault files are indexable and batches re-index work
 * for modified notes.
 */
export class NotesIndexer {
  // Files flagged as modified; reindexed lazily by refreshIndex()
  private notesToReindex = new Set<TAbstractFile>()

  constructor(private plugin: LocatorPlugin) {}

  /**
   * Updated notes are not reindexed immediately for performance reasons.
   * They're added to a list, and reindex is done the next time we open Locator.
   */
  public flagNoteForReindex(note: TAbstractFile): void {
    this.notesToReindex.add(note)
  }

  /**
   * Reindexes every flagged note: refreshes the live document cache, then
   * removes and re-adds the paths in the search engine, and clears the set.
   */
  public async refreshIndex(): Promise<void> {
    for (const file of this.notesToReindex) {
      logVerbose('Updating file', file.path)
      await this.plugin.documentsRepository.addDocument(file.path)
    }

    const paths = [...this.notesToReindex].map(n => n.path)
    if (paths.length) {
      this.plugin.searchEngine.removeFromPaths(paths)
      await this.plugin.searchEngine.addFromPaths(paths)
      this.notesToReindex.clear()
    }
  }

  /** A file is indexable if either its name or its content can be indexed. */
  public isFileIndexable(path: string): boolean {
    return this.isFilenameIndexable(path) || this.isContentIndexable(path)
  }

  /**
   * True when the file's content can be indexed: plaintext, canvas,
   * Dataloom — or PDFs/images, when the corresponding companion plugin
   * (Text Extractor / AI Image Analyzer) and setting are both enabled.
   */
  public isContentIndexable(path: string): boolean {
    const settings = this.plugin.settings
    const hasTextExtractor = !!this.plugin.getTextExtractor()
    const hasAIImageAnalyzer = !!this.plugin.getAIImageAnalyzer()
    const canIndexPDF = hasTextExtractor && settings.PDFIndexing
    const canIndexImages = hasTextExtractor && settings.imagesIndexing
    const canIndexImagesAI = hasAIImageAnalyzer && settings.aiImageIndexing
    return (
      this.isFilePlaintext(path) ||
      isFileCanvas(path) ||
      isFileFromDataloom(path) ||
      (canIndexPDF && isFilePDF(path)) ||
      (canIndexImages && isFileImage(path)) ||
      (canIndexImagesAI && isFileImage(path))
    )
  }

  /** True when the file name alone is enough to index the file. */
  public isFilenameIndexable(path: string): boolean {
    return (
      this.canIndexUnsupportedFiles() ||
      this.isFilePlaintext(path) ||
      isFileCanvas(path) ||
      isFileFromDataloom(path)
    )
  }

  /**
   * Whether unsupported file types should be indexed by name, per the
   * plugin setting or (in 'default' mode) Obsidian's own
   * "showUnsupportedFiles" config.
   */
  public canIndexUnsupportedFiles(): boolean {
    return (
      this.plugin.settings.unsupportedFilesIndexing === 'yes' ||
      (this.plugin.settings.unsupportedFilesIndexing === 'default' &&
        !!this.plugin.app.vault.getConfig('showUnsupportedFiles'))
    )
  }

  /**
   * Index a non-existing note.
   * Useful to find internal links that lead (yet) to nowhere
   * @param name
   * @param parent The note referencing the non-existing note
   */
  public generateIndexableNonexistingDocument(
    name: string,
    parent: string
  ): IndexedDocument {
    name = removeAnchors(name)
    // Ensure the synthetic path carries a .md extension
    const filename = name + (name.endsWith('.md') ? '' : '.md')

    return {
      path: filename,
      basename: name,
      displayTitle: '',
      mtime: 0,

      content: '',
      cleanedContent: '',
      tags: [],
      unmarkedTags: [],
      aliases: '',
      headings1: '',
      headings2: '',
      headings3: '',

      doesNotExist: true,
      parent,
    }
  }

  /** True for markdown and any user-configured plaintext extensions. */
  public isFilePlaintext(path: string): boolean {
    return [...this.plugin.settings.indexedFileTypes, 'md'].some(t =>
      path.endsWith(`.${t}`)
    )
  }
}
|
261
src/repositories/documents-repository.ts
Normal file
261
src/repositories/documents-repository.ts
Normal file
|
@ -0,0 +1,261 @@
|
||||||
|
import { normalizePath, Notice, TFile } from 'obsidian'
|
||||||
|
import type { IndexedDocument } from '../globals'
|
||||||
|
import {
|
||||||
|
countError,
|
||||||
|
extractHeadingsFromCache,
|
||||||
|
getAliasesFromMetadata,
|
||||||
|
getTagsFromMetadata,
|
||||||
|
isFileCanvas,
|
||||||
|
isFileFromDataloom,
|
||||||
|
isFileImage,
|
||||||
|
isFileOffice,
|
||||||
|
isFilePDF,
|
||||||
|
logVerbose,
|
||||||
|
removeDiacritics,
|
||||||
|
stripMarkdownCharacters,
|
||||||
|
} from '../tools/utils'
|
||||||
|
import type { CanvasData } from 'obsidian/canvas'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import { getNonExistingNotes } from '../tools/notes'
|
||||||
|
|
||||||
|
export class DocumentsRepository {
|
||||||
|
/**
|
||||||
|
* The "live cache", containing all indexed vault files
|
||||||
|
* in the form of IndexedDocuments
|
||||||
|
*/
|
||||||
|
private documents: Map<string, IndexedDocument> = new Map()
|
||||||
|
private errorsCount = 0
|
||||||
|
private errorsWarned = false
|
||||||
|
|
||||||
|
constructor(private plugin: LocatorPlugin) {
  // Error budget: decay the error counter by one per second, so only
  // sustained bursts of failures accumulate.
  // NOTE(review): this interval is never cleared — presumably acceptable
  // for the plugin's lifetime; confirm.
  setInterval(() => {
    if (this.errorsCount > 0) {
      --this.errorsCount
    }
  }, 1000)
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set or update the live cache with the content of the given file.
|
||||||
|
* @param path
|
||||||
|
*/
|
||||||
|
/**
 * Set or update the live cache with the content of the given file.
 * On failure the error is logged (and counted via countError()) and the
 * stale entry is evicted, rather than throwing to the caller.
 * @param path
 */
public async addDocument(path: string): Promise<void> {
  try {
    const doc = await this.getAndMapIndexedDocument(path)
    // Defensive: a document without a path can't be keyed or searched
    if (!doc.path) {
      console.error(
        `Missing .path field in IndexedDocument "${doc.basename}", skipping`
      )
      return
    }
    this.documents.set(path, doc)
    // Keep the embeds index in sync with the (re)indexed note
    this.plugin.embedsRepository.refreshEmbedsForNote(path)
  } catch (e) {
    console.warn(`Locator: Error while adding "${path}" to live cache`, e)
    // Shouldn't be needed, but...
    this.removeDocument(path)
    countError()
  }
}
|
||||||
|
|
||||||
|
/** Evicts the document at `path` from the live cache (no-op if absent). */
public removeDocument(path: string): void {
  this.documents.delete(path)
}
|
||||||
|
|
||||||
|
/**
 * Returns the IndexedDocument for `path`, generating and caching it on
 * the fly when it isn't in the live cache yet.
 * NOTE(review): despite the non-null return type, this can return
 * `undefined` at runtime when the file can't be read (see the `!` below);
 * callers are expected to tolerate that.
 */
public async getDocument(path: string): Promise<IndexedDocument> {
  if (this.documents.has(path)) {
    return this.documents.get(path)!
  }
  logVerbose('Generating IndexedDocument from', path)
  await this.addDocument(path)
  const document = this.documents.get(path)

  // Only happens if the cache is corrupted
  if (!document) {
    console.error('Locator', path, 'cannot be read')
    countError()
  }

  // The document might be undefined, but this shouldn't stop the search from mostly working
  return document!
}
|
||||||
|
|
||||||
|
/**
 * Extracts the indexable text from a file and maps it (plus its metadata:
 * tags, aliases, headings, display title) into an `IndexedDocument`.
 *
 * Extraction strategy depends on the file type, tried in order: plaintext,
 * Canvas, Dataloom, image (OCR and/or AI analysis), PDF, Office document,
 * and finally "unsupported" files which are indexed by path only.
 *
 * @param path Vault-relative path; normalized before use
 * @returns The fully populated `IndexedDocument`
 * @throws Error if the path does not resolve to a file, or resolves to
 * something that is not a `TFile` (e.g. a folder)
 */
private async getAndMapIndexedDocument(
  path: string
): Promise<IndexedDocument> {
  path = normalizePath(path)
  const app = this.plugin.app
  const file = app.vault.getAbstractFileByPath(path)
  if (!file) throw new Error(`Invalid file path: "${path}"`)
  if (!(file instanceof TFile)) throw new Error(`Not a TFile: "${path}"`)
  // `null` means "no extractor matched"; normalized to '' further down
  let content: string | null = null

  // Optional companion plugins; either may be unavailable
  const extractor = this.plugin.getTextExtractor()
  const aiImageAnalyzer = this.plugin.getAIImageAnalyzer()

  // ** Plain text **
  // Just read the file content
  if (this.plugin.notesIndexer.isFilePlaintext(path)) {
    content = await app.vault.cachedRead(file)
  }

  // ** Canvas **
  // Extract the text fields from the json
  else if (isFileCanvas(path)) {
    const fileContents = await app.vault.cachedRead(file)
    const canvas: CanvasData = fileContents ? JSON.parse(fileContents) : {}
    let texts: string[] = []
    // Concatenate text from the canvas fields
    for (const node of canvas.nodes ?? []) {
      if (node.type === 'text') {
        texts.push(node.text)
      } else if (node.type === 'file') {
        // For file nodes, index the referenced file's path
        texts.push(node.file)
      }
    }
    // Edge labels are indexable text too
    for (const edge of (canvas.edges ?? []).filter(e => !!e.label)) {
      texts.push(edge.label!)
    }
    content = texts.join('\r\n')
  }

  // ** Dataloom plugin **
  else if (isFileFromDataloom(path)) {
    try {
      const data = JSON.parse(await app.vault.cachedRead(file))
      // data is a json object, we recursively iterate the keys
      // and concatenate the values if the key is "content"
      const texts: string[] = []
      const iterate = (obj: any) => {
        for (const key in obj) {
          if (typeof obj[key] === 'object') {
            iterate(obj[key])
          } else if (key === 'content') {
            texts.push(obj[key])
          }
        }
      }
      iterate(data)
      content = texts.join('\r\n')
    } catch (e) {
      // Malformed Dataloom file: log and fall through with content === null
      console.error('Locator: Error while parsing Dataloom file', path)
      console.error(e)
    }
  }

  // ** Image **
  // Indexed if either OCR extraction or AI analysis is enabled and applicable
  else if (
    isFileImage(path) &&
    ((this.plugin.settings.imagesIndexing &&
      extractor?.canFileBeExtracted(path)) ||
      (this.plugin.settings.aiImageIndexing &&
        aiImageAnalyzer?.canBeAnalyzed(file)))
  ) {
    if (
      this.plugin.settings.imagesIndexing &&
      extractor?.canFileBeExtracted(path)
    ) {
      content = await extractor.extractText(file)
    }

    // AI analysis is prepended to any OCR text already extracted
    if (
      this.plugin.settings.aiImageIndexing &&
      aiImageAnalyzer?.canBeAnalyzed(file)
    ) {
      content = (await aiImageAnalyzer.analyzeImage(file)) + (content ?? '')
    }
  }
  // ** PDF **
  else if (
    isFilePDF(path) &&
    this.plugin.settings.PDFIndexing &&
    extractor?.canFileBeExtracted(path)
  ) {
    content = await extractor.extractText(file)
  }

  // ** Office document **
  else if (
    isFileOffice(path) &&
    this.plugin.settings.officeIndexing &&
    extractor?.canFileBeExtracted(path)
  ) {
    content = await extractor.extractText(file)
  }

  // ** Unsupported files **
  // Still indexable by their path, so they can be found by filename
  else if (this.plugin.notesIndexer.isFilenameIndexable(path)) {
    content = file.path
  }

  if (content === null || content === undefined) {
    // This shouldn't happen
    console.warn(`Locator: ${content} content for file`, file.path)
    content = ''
  }
  const metadata = app.metadataCache.getFileCache(file)

  // Look for links that lead to non-existing files,
  // and add them to the index.
  if (metadata) {
    const nonExisting = getNonExistingNotes(this.plugin.app, file, metadata)
    for (const name of nonExisting.filter(o => !this.documents.has(o))) {
      const doc =
        this.plugin.notesIndexer.generateIndexableNonexistingDocument(
          name,
          file.path
        )
      // TODO: index non-existing note
    }

    // EXCALIDRAW
    // Remove the json code (stored in comment sections) from the content
    if (metadata.frontmatter?.['excalidraw-plugin']) {
      const comments =
        metadata.sections?.filter(s => s.type === 'comment') ?? []
      for (const { start, end } of comments.map(c => c.position)) {
        // NOTE(review): the `- 1` assumes one character before the comment
        // should also be dropped, and offsets are only valid against the
        // unmodified content for the first removed section — confirm
        content =
          content.substring(0, start.offset - 1) +
          content.substring(end.offset)
      }
    }
  }
  // Display title: either the first H1 heading, or a frontmatter property
  // named by the `displayTitle` setting
  let displayTitle: string
  if (this.plugin.settings.displayTitle === '#heading') {
    displayTitle = metadata?.headings?.find(h => h.level === 1)?.heading ?? ''
  } else {
    displayTitle =
      metadata?.frontmatter?.[this.plugin.settings.displayTitle] ?? ''
  }
  const tags = getTagsFromMetadata(metadata)
  return {
    basename: file.basename,
    displayTitle,
    content,
    /** Content without diacritics and markdown chars */
    cleanedContent: stripMarkdownCharacters(removeDiacritics(content)),
    path: file.path,
    // Epoch milliseconds, used for recency boosting and cache invalidation
    mtime: file.stat.mtime,

    tags: tags,
    unmarkedTags: tags.map(t => t.replace('#', '')),
    // NOTE(review): joined with '' — aliases run together with no separator,
    // unlike headings which use ' '; confirm this is intended
    aliases: getAliasesFromMetadata(metadata).join(''),
    headings1: metadata
      ? extractHeadingsFromCache(metadata, 1).join(' ')
      : '',
    headings2: metadata
      ? extractHeadingsFromCache(metadata, 2).join(' ')
      : '',
    headings3: metadata
      ? extractHeadingsFromCache(metadata, 3).join(' ')
      : '',
  }
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
106
src/repositories/embeds-repository.ts
Normal file
106
src/repositories/embeds-repository.ts
Normal file
|
@ -0,0 +1,106 @@
|
||||||
|
import { getLinkpath, Notice } from 'obsidian'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import { logVerbose } from '../tools/utils'
|
||||||
|
|
||||||
|
export class EmbedsRepository {
|
||||||
|
/** Map<embedded file, notes where the embed is referenced> */
|
||||||
|
private embeds: Map<string, Set<string>> = new Map()
|
||||||
|
|
||||||
|
constructor(private plugin: LocatorPlugin) {}
|
||||||
|
|
||||||
|
public addEmbed(embed: string, notePath: string): void {
|
||||||
|
if (!this.embeds.has(embed)) {
|
||||||
|
this.embeds.set(embed, new Set())
|
||||||
|
}
|
||||||
|
this.embeds.get(embed)!.add(notePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
public removeFile(filePath: string): void {
|
||||||
|
// If the file is embedded
|
||||||
|
this.embeds.delete(filePath)
|
||||||
|
// If the file is a note referencing other files
|
||||||
|
this.refreshEmbedsForNote(filePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
public renameFile(oldPath: string, newPath: string): void {
|
||||||
|
// If the file is embedded
|
||||||
|
if (this.embeds.has(oldPath)) {
|
||||||
|
this.embeds.set(newPath, this.embeds.get(oldPath)!)
|
||||||
|
this.embeds.delete(oldPath)
|
||||||
|
}
|
||||||
|
// If the file is a note referencing other files
|
||||||
|
this.embeds.forEach((referencedBy, _key) => {
|
||||||
|
if (referencedBy.has(oldPath)) {
|
||||||
|
referencedBy.delete(oldPath)
|
||||||
|
referencedBy.add(newPath)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
public refreshEmbedsForNote(filePath: string): void {
|
||||||
|
this.embeds.forEach((referencedBy, _key) => {
|
||||||
|
if (referencedBy.has(filePath)) {
|
||||||
|
referencedBy.delete(filePath)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
this.addEmbedsForNote(filePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
public getEmbeds(pathEmbedded: string): string[] {
|
||||||
|
const embeds = this.embeds.has(pathEmbedded)
|
||||||
|
? [...this.embeds.get(pathEmbedded)!]
|
||||||
|
: []
|
||||||
|
return embeds
|
||||||
|
}
|
||||||
|
|
||||||
|
public async writeToCache(): Promise<void> {
|
||||||
|
logVerbose('Writing embeds to cache')
|
||||||
|
const database = this.plugin.database
|
||||||
|
const data: { embedded: string; referencedBy: string[] }[] = []
|
||||||
|
for (const [path, embedsList] of this.embeds) {
|
||||||
|
data.push({ embedded: path, referencedBy: [...embedsList] })
|
||||||
|
}
|
||||||
|
await database.embeds.clear()
|
||||||
|
await database.embeds.bulkAdd(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
public async loadFromCache(): Promise<void> {
|
||||||
|
try {
|
||||||
|
const database = this.plugin.database
|
||||||
|
if (!database.embeds) {
|
||||||
|
logVerbose('No embeds in cache')
|
||||||
|
return
|
||||||
|
}
|
||||||
|
logVerbose('Loading embeds from cache')
|
||||||
|
const embedsArr = await database.embeds.toArray()
|
||||||
|
for (const { embedded: path, referencedBy: embeds } of embedsArr) {
|
||||||
|
for (const embed of embeds) {
|
||||||
|
this.addEmbed(path, embed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
this.plugin.database.clearCache()
|
||||||
|
console.error('Locator - Error while loading embeds cache')
|
||||||
|
new Notice('Locator - There was an error while loading the cache. Please restart Obsidian.')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private addEmbedsForNote(notePath: string): void {
|
||||||
|
// Get all embeds from the note
|
||||||
|
// and map them to TFiles to get the real path
|
||||||
|
const embeds = (
|
||||||
|
this.plugin.app.metadataCache.getCache(notePath)?.embeds ?? []
|
||||||
|
)
|
||||||
|
.map(embed =>
|
||||||
|
this.plugin.app.metadataCache.getFirstLinkpathDest(
|
||||||
|
getLinkpath(embed.link),
|
||||||
|
notePath
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.filter(o => !!o)
|
||||||
|
for (const embed of embeds) {
|
||||||
|
this.addEmbed(embed!.path, notePath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
109
src/search/query.ts
Normal file
109
src/search/query.ts
Normal file
|
@ -0,0 +1,109 @@
|
||||||
|
import { removeDiacritics } from '../tools/utils'
|
||||||
|
import { parse } from 'search-query-parser'
|
||||||
|
|
||||||
|
const keywords = ['ext', 'path'] as const
|
||||||
|
|
||||||
|
type Keywords = {
|
||||||
|
[K in typeof keywords[number]]?: string[]
|
||||||
|
} & { text: string[] }
|
||||||
|
|
||||||
|
export class Query {
|
||||||
|
query: Keywords & {
|
||||||
|
exclude: Keywords
|
||||||
|
}
|
||||||
|
#inQuotes: string[]
|
||||||
|
|
||||||
|
constructor(text = '', options: { ignoreDiacritics: boolean, ignoreArabicDiacritics: boolean}) {
|
||||||
|
if (options.ignoreDiacritics) {
|
||||||
|
text = removeDiacritics(text, options.ignoreArabicDiacritics)
|
||||||
|
}
|
||||||
|
const parsed = parse(text.toLowerCase(), {
|
||||||
|
tokenize: true,
|
||||||
|
keywords: keywords as unknown as string[],
|
||||||
|
}) as unknown as typeof this.query
|
||||||
|
|
||||||
|
// Default values
|
||||||
|
parsed.text = parsed.text ?? []
|
||||||
|
parsed.exclude = parsed.exclude ?? {}
|
||||||
|
parsed.exclude.text = parsed.exclude.text ?? []
|
||||||
|
if (!Array.isArray(parsed.exclude.text)) {
|
||||||
|
parsed.exclude.text = [parsed.exclude.text]
|
||||||
|
}
|
||||||
|
// Remove empty excluded strings
|
||||||
|
parsed.exclude.text = parsed.exclude.text.filter(o => o.length)
|
||||||
|
|
||||||
|
// Make sure that all fields are string[]
|
||||||
|
for (const k of keywords) {
|
||||||
|
const v = parsed[k]
|
||||||
|
if (v) {
|
||||||
|
parsed[k] = Array.isArray(v) ? v : [v]
|
||||||
|
}
|
||||||
|
const e = parsed.exclude[k]
|
||||||
|
if (e) {
|
||||||
|
parsed.exclude[k] = Array.isArray(e) ? e : [e]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.query = parsed
|
||||||
|
|
||||||
|
// Extract keywords starting with a dot...
|
||||||
|
const ext = this.query.text
|
||||||
|
.filter(o => o.startsWith('.'))
|
||||||
|
.map(o => o.slice(1))
|
||||||
|
// add them to the ext field...
|
||||||
|
this.query.ext = [...new Set([...ext, ...(this.query.ext ?? [])])]
|
||||||
|
// and remove them from the text field
|
||||||
|
this.query.text = this.query.text.filter(o => !o.startsWith('.'))
|
||||||
|
|
||||||
|
// Get strings in quotes, and remove the quotes
|
||||||
|
this.#inQuotes =
|
||||||
|
text.match(/"([^"]+)"/g)?.map(o => o.replace(/"/g, '')) ?? []
|
||||||
|
}
|
||||||
|
|
||||||
|
public isEmpty(): boolean {
|
||||||
|
for (const k of keywords) {
|
||||||
|
if (this.query[k]?.length) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if (this.query.text.length) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
public segmentsToStr(): string {
|
||||||
|
return this.query.text.join(' ')
|
||||||
|
}
|
||||||
|
|
||||||
|
public getTags(): string[] {
|
||||||
|
return this.query.text.filter(o => o.startsWith('#'))
|
||||||
|
}
|
||||||
|
|
||||||
|
public getTagsWithoutHashtag(): string[] {
|
||||||
|
return this.getTags().map(o => o.replace(/^#/, ''))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* @returns An array of strings that are in quotes
|
||||||
|
*/
|
||||||
|
public getExactTerms(): string[] {
|
||||||
|
return [
|
||||||
|
...new Set(
|
||||||
|
[
|
||||||
|
...this.query.text.filter(o => o.split(' ').length > 1),
|
||||||
|
...this.#inQuotes,
|
||||||
|
].map(str => str.toLowerCase())
|
||||||
|
),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
public getBestStringForExcerpt(): string {
|
||||||
|
// If we have quoted expressions, return the longest one
|
||||||
|
if (this.#inQuotes.length) {
|
||||||
|
return this.#inQuotes.sort((a, b) => b.length - a.length)[0] ?? ''
|
||||||
|
}
|
||||||
|
// Otherwise, just return the query as is
|
||||||
|
return this.segmentsToStr()
|
||||||
|
}
|
||||||
|
}
|
554
src/search/search-engine.ts
Normal file
554
src/search/search-engine.ts
Normal file
|
@ -0,0 +1,554 @@
|
||||||
|
import MiniSearch, {
|
||||||
|
type AsPlainObject,
|
||||||
|
type Options,
|
||||||
|
type SearchResult,
|
||||||
|
} from 'minisearch'
|
||||||
|
import {
|
||||||
|
RecencyCutoff,
|
||||||
|
type DocumentRef,
|
||||||
|
type IndexedDocument,
|
||||||
|
type ResultNote,
|
||||||
|
} from '../globals'
|
||||||
|
|
||||||
|
import {
|
||||||
|
chunkArray,
|
||||||
|
countError,
|
||||||
|
logVerbose,
|
||||||
|
removeDiacritics,
|
||||||
|
} from '../tools/utils'
|
||||||
|
import { Notice } from 'obsidian'
|
||||||
|
import type { Query } from './query'
|
||||||
|
import { sortBy } from 'lodash-es'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import { Tokenizer } from './tokenizer'
|
||||||
|
|
||||||
|
export class SearchEngine {
|
||||||
|
private tokenizer: Tokenizer
|
||||||
|
private minisearch: MiniSearch
|
||||||
|
/** Map<path, mtime> */
|
||||||
|
private indexedDocuments: Map<string, number> = new Map()
|
||||||
|
|
||||||
|
// private previousResults: SearchResult[] = []
|
||||||
|
// private previousQuery: Query | null = null
|
||||||
|
|
||||||
|
constructor(protected plugin: LocatorPlugin) {
|
||||||
|
this.tokenizer = new Tokenizer(plugin)
|
||||||
|
this.minisearch = new MiniSearch(this.getOptions())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return true if the cache is valid
|
||||||
|
*/
|
||||||
|
async loadCache(): Promise<boolean> {
|
||||||
|
await this.plugin.embedsRepository.loadFromCache()
|
||||||
|
const cache = await this.plugin.database.getMinisearchCache()
|
||||||
|
if (cache) {
|
||||||
|
this.minisearch = await MiniSearch.loadJSAsync(
|
||||||
|
cache.data,
|
||||||
|
this.getOptions()
|
||||||
|
)
|
||||||
|
this.indexedDocuments = new Map(cache.paths.map(o => [o.path, o.mtime]))
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
console.log('Locator - No cache found')
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the list of documents that need to be reindexed or removed,
|
||||||
|
* either because they are new, have been modified, or have been deleted
|
||||||
|
* @param docs
|
||||||
|
*/
|
||||||
|
getDocumentsToReindex(docs: DocumentRef[]): {
|
||||||
|
toAdd: DocumentRef[]
|
||||||
|
toRemove: DocumentRef[]
|
||||||
|
} {
|
||||||
|
const docsMap = new Map(docs.map(d => [d.path, d.mtime]))
|
||||||
|
|
||||||
|
const toAdd = docs.filter(
|
||||||
|
d =>
|
||||||
|
!this.indexedDocuments.has(d.path) ||
|
||||||
|
this.indexedDocuments.get(d.path) !== d.mtime
|
||||||
|
)
|
||||||
|
|
||||||
|
const toRemove = [...this.indexedDocuments]
|
||||||
|
.filter(
|
||||||
|
([path, mtime]) => !docsMap.has(path) || docsMap.get(path) !== mtime
|
||||||
|
)
|
||||||
|
.map(o => ({ path: o[0], mtime: o[1] }))
|
||||||
|
return { toAdd, toRemove }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add notes/PDFs/images to the search index
|
||||||
|
* @param paths
|
||||||
|
*/
|
||||||
|
public async addFromPaths(paths: string[]): Promise<void> {
|
||||||
|
logVerbose('Adding files', paths)
|
||||||
|
let documents = (
|
||||||
|
await Promise.all(
|
||||||
|
paths.map(
|
||||||
|
async path => await this.plugin.documentsRepository.getDocument(path)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).filter(d => !!d?.path)
|
||||||
|
logVerbose('Sorting documents to first index markdown')
|
||||||
|
// Index markdown files first
|
||||||
|
documents = sortBy(documents, d => (d.path.endsWith('.md') ? 0 : 1))
|
||||||
|
|
||||||
|
// If a document is already added, discard it
|
||||||
|
this.removeFromPaths(
|
||||||
|
documents.filter(d => this.indexedDocuments.has(d.path)).map(d => d.path)
|
||||||
|
)
|
||||||
|
|
||||||
|
// Split the documents in smaller chunks to add them to minisearch
|
||||||
|
const chunkedDocs = chunkArray(documents, 500)
|
||||||
|
for (const docs of chunkedDocs) {
|
||||||
|
logVerbose('Indexing into search engine', docs)
|
||||||
|
// Update the list of indexed docs
|
||||||
|
docs.forEach(doc => this.indexedDocuments.set(doc.path, doc.mtime))
|
||||||
|
|
||||||
|
// Discard files that may have been already added (though it shouldn't happen)
|
||||||
|
const alreadyAdded = docs.filter(doc => this.minisearch.has(doc.path))
|
||||||
|
this.removeFromPaths(alreadyAdded.map(o => o.path))
|
||||||
|
|
||||||
|
// Add docs to minisearch
|
||||||
|
await this.minisearch.addAllAsync(docs)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Discard a document from minisearch
|
||||||
|
* @param paths
|
||||||
|
*/
|
||||||
|
public removeFromPaths(paths: string[]): void {
|
||||||
|
paths.forEach(p => this.indexedDocuments.delete(p))
|
||||||
|
// Make sure to not discard a file that we don't have
|
||||||
|
const existing = paths.filter(p => this.minisearch.has(p))
|
||||||
|
this.minisearch.discardAll(existing)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Searches the index for the given query,
|
||||||
|
* and returns an array of raw results
|
||||||
|
*/
|
||||||
|
public async search(
|
||||||
|
query: Query,
|
||||||
|
options: { prefixLength: number; singleFilePath?: string }
|
||||||
|
): Promise<SearchResult[]> {
|
||||||
|
const settings = this.plugin.settings
|
||||||
|
if (query.isEmpty()) {
|
||||||
|
// this.previousResults = []
|
||||||
|
// this.previousQuery = null
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
logVerbose('=== New search ===')
|
||||||
|
logVerbose('Starting search for', query)
|
||||||
|
|
||||||
|
let fuzziness: number
|
||||||
|
switch (settings.fuzziness) {
|
||||||
|
case '0':
|
||||||
|
fuzziness = 0
|
||||||
|
break
|
||||||
|
case '1':
|
||||||
|
fuzziness = 0.1
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
fuzziness = 0.2
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
const searchTokens = this.tokenizer.tokenizeForSearch(query.segmentsToStr())
|
||||||
|
logVerbose(JSON.stringify(searchTokens, null, 1))
|
||||||
|
let results = this.minisearch.search(searchTokens, {
|
||||||
|
prefix: term => term.length >= options.prefixLength,
|
||||||
|
// length <= 3: no fuzziness
|
||||||
|
// length <= 5: fuzziness of 10%
|
||||||
|
// length > 5: fuzziness of 20%
|
||||||
|
fuzzy: term =>
|
||||||
|
term.length <= 3 ? 0 : term.length <= 5 ? fuzziness / 2 : fuzziness,
|
||||||
|
boost: {
|
||||||
|
basename: settings.weightBasename,
|
||||||
|
aliases: settings.weightBasename,
|
||||||
|
displayTitle: settings.weightBasename,
|
||||||
|
directory: settings.weightDirectory,
|
||||||
|
headings1: settings.weightH1,
|
||||||
|
headings2: settings.weightH2,
|
||||||
|
headings3: settings.weightH3,
|
||||||
|
tags: settings.weightUnmarkedTags,
|
||||||
|
unmarkedTags: settings.weightUnmarkedTags,
|
||||||
|
},
|
||||||
|
// The query is already tokenized, don't tokenize again
|
||||||
|
tokenize: text => [text],
|
||||||
|
boostDocument(_id, _term, storedFields) {
|
||||||
|
if (
|
||||||
|
!storedFields?.mtime ||
|
||||||
|
settings.recencyBoost === RecencyCutoff.Disabled
|
||||||
|
) {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
const mtime = storedFields?.mtime as number
|
||||||
|
const now = new Date().valueOf()
|
||||||
|
const daysElapsed = (now - mtime) / (24 * 3600)
|
||||||
|
|
||||||
|
// Documents boost
|
||||||
|
const cutoff = {
|
||||||
|
[RecencyCutoff.Day]: -3,
|
||||||
|
[RecencyCutoff.Week]: -0.3,
|
||||||
|
[RecencyCutoff.Month]: -0.1,
|
||||||
|
} as const
|
||||||
|
return 1 + Math.exp(cutoff[settings.recencyBoost] * daysElapsed)
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
logVerbose(`Found ${results.length} results`, results)
|
||||||
|
|
||||||
|
// Filter query results to only keep files that match query.query.ext (if any)
|
||||||
|
if (query.query.ext?.length) {
|
||||||
|
results = results.filter(r => {
|
||||||
|
// ".can" should match ".canvas"
|
||||||
|
const ext = '.' + r.id.split('.').pop()
|
||||||
|
return query.query.ext?.some(e =>
|
||||||
|
ext.startsWith(e.startsWith('.') ? e : '.' + e)
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter query results that match the path
|
||||||
|
if (query.query.path) {
|
||||||
|
results = results.filter(r =>
|
||||||
|
query.query.path?.some(p =>
|
||||||
|
(r.id as string).toLowerCase().includes(p.toLowerCase())
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (query.query.exclude.path) {
|
||||||
|
results = results.filter(
|
||||||
|
r =>
|
||||||
|
!query.query.exclude.path?.some(p =>
|
||||||
|
(r.id as string).toLowerCase().includes(p.toLowerCase())
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!results.length) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.singleFilePath) {
|
||||||
|
return results.filter(r => r.id === options.singleFilePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
logVerbose(
|
||||||
|
'searching with downranked folders',
|
||||||
|
settings.downrankedFoldersFilters
|
||||||
|
)
|
||||||
|
|
||||||
|
// Hide or downrank files that are in Obsidian's excluded list
|
||||||
|
if (settings.hideExcluded) {
|
||||||
|
// Filter the files out
|
||||||
|
results = results.filter(
|
||||||
|
result =>
|
||||||
|
!(
|
||||||
|
this.plugin.app.metadataCache.isUserIgnored &&
|
||||||
|
this.plugin.app.metadataCache.isUserIgnored(result.id)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
// Just downrank them
|
||||||
|
results.forEach(result => {
|
||||||
|
if (
|
||||||
|
this.plugin.app.metadataCache.isUserIgnored &&
|
||||||
|
this.plugin.app.metadataCache.isUserIgnored(result.id)
|
||||||
|
) {
|
||||||
|
result.score /= 10
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract tags from the query
|
||||||
|
const tags = query.getTags()
|
||||||
|
|
||||||
|
for (const result of results) {
|
||||||
|
const path = result.id
|
||||||
|
if (settings.downrankedFoldersFilters.length > 0) {
|
||||||
|
// downrank files that are in folders listed in the downrankedFoldersFilters
|
||||||
|
let downrankingFolder = false
|
||||||
|
settings.downrankedFoldersFilters.forEach(filter => {
|
||||||
|
if (path.startsWith(filter)) {
|
||||||
|
// we don't want the filter to match the folder sources, e.g.
|
||||||
|
// it needs to match a whole folder name
|
||||||
|
if (path === filter || path.startsWith(filter + '/')) {
|
||||||
|
logVerbose('searching with downranked folders in path: ', path)
|
||||||
|
downrankingFolder = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
if (downrankingFolder) {
|
||||||
|
result.score /= 10
|
||||||
|
}
|
||||||
|
const pathParts = path.split('/')
|
||||||
|
const pathPartsLength = pathParts.length
|
||||||
|
for (let i = 0; i < pathPartsLength; i++) {
|
||||||
|
const pathPart = pathParts[i]
|
||||||
|
if (settings.downrankedFoldersFilters.includes(pathPart)) {
|
||||||
|
result.score /= 10
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadata = this.plugin.app.metadataCache.getCache(path)
|
||||||
|
if (metadata) {
|
||||||
|
// Boost custom properties
|
||||||
|
for (const { name, weight } of settings.weightCustomProperties) {
|
||||||
|
const values = metadata?.frontmatter?.[name]
|
||||||
|
if (values && result.terms.some(t => values.includes(t))) {
|
||||||
|
logVerbose(`Boosting field "${name}" x${weight} for ${path}`)
|
||||||
|
result.score *= weight
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Put the results with tags on top
|
||||||
|
for (const tag of tags) {
|
||||||
|
if ((result.tags ?? []).includes(tag)) {
|
||||||
|
result.score *= 100
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
logVerbose('Sorting and limiting results')
|
||||||
|
|
||||||
|
// Sort results and keep the 50 best
|
||||||
|
results = results.sort((a, b) => b.score - a.score).slice(0, 50)
|
||||||
|
|
||||||
|
logVerbose('Filtered results:', results)
|
||||||
|
|
||||||
|
if (results.length) logVerbose('First result:', results[0])
|
||||||
|
|
||||||
|
const documents = await Promise.all(
|
||||||
|
results.map(async result => {
|
||||||
|
const doc = await this.plugin.documentsRepository.getDocument(result.id)
|
||||||
|
if (!doc) {
|
||||||
|
console.warn(
|
||||||
|
`Locator - Note "${result.id}" not in the live cache`
|
||||||
|
)
|
||||||
|
countError(true)
|
||||||
|
}
|
||||||
|
return doc
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// If the search query contains quotes, filter out results that don't have the exact match
|
||||||
|
const exactTerms = query.getExactTerms()
|
||||||
|
if (exactTerms.length) {
|
||||||
|
logVerbose('Filtering with quoted terms: ', exactTerms)
|
||||||
|
results = results.filter(r => {
|
||||||
|
const document = documents.find(d => d.path === r.id)
|
||||||
|
const title = document?.path.toLowerCase() ?? ''
|
||||||
|
const content = (document?.cleanedContent ?? '').toLowerCase()
|
||||||
|
return exactTerms.every(
|
||||||
|
q =>
|
||||||
|
content.includes(q) ||
|
||||||
|
removeDiacritics(
|
||||||
|
title,
|
||||||
|
this.plugin.settings.ignoreArabicDiacritics
|
||||||
|
).includes(q)
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the search query contains exclude terms, filter out results that have them
|
||||||
|
const exclusions = query.query.exclude.text
|
||||||
|
if (exclusions.length) {
|
||||||
|
logVerbose('Filtering with exclusions')
|
||||||
|
results = results.filter(r => {
|
||||||
|
const content = (
|
||||||
|
documents.find(d => d.path === r.id)?.content ?? ''
|
||||||
|
).toLowerCase()
|
||||||
|
return exclusions.every(q => !content.includes(q))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
logVerbose('Deduping')
|
||||||
|
// FIXME:
|
||||||
|
// Dedupe results - clutch for https://github.com/scambier/obsidian-locator/issues/129
|
||||||
|
results = results.filter(
|
||||||
|
(result, index, arr) => arr.findIndex(t => t.id === result.id) === index
|
||||||
|
)
|
||||||
|
|
||||||
|
// this.previousQuery = query
|
||||||
|
// this.previousResults = results
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Searches the index, and returns an array of ResultNote objects.
|
||||||
|
* If we have the singleFile option set,
|
||||||
|
* the array contains a single result from that file
|
||||||
|
* @param query
|
||||||
|
* @param options
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public async getSuggestions(
|
||||||
|
query: Query,
|
||||||
|
options?: Partial<{ singleFilePath?: string }>
|
||||||
|
): Promise<ResultNote[]> {
|
||||||
|
// Get the raw results
|
||||||
|
let results: SearchResult[]
|
||||||
|
if (this.plugin.settings.simpleSearch) {
|
||||||
|
results = await this.search(query, {
|
||||||
|
prefixLength: 3,
|
||||||
|
singleFilePath: options?.singleFilePath,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
results = await this.search(query, {
|
||||||
|
prefixLength: 1,
|
||||||
|
singleFilePath: options?.singleFilePath,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const documents = await Promise.all(
|
||||||
|
results.map(
|
||||||
|
async result =>
|
||||||
|
await this.plugin.documentsRepository.getDocument(result.id)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
// Inject embeds for images, documents, and PDFs
|
||||||
|
let total = documents.length
|
||||||
|
for (let i = 0; i < total; i++) {
|
||||||
|
const doc = documents[i]
|
||||||
|
if (!doc) continue
|
||||||
|
|
||||||
|
const embeds = this.plugin.embedsRepository
|
||||||
|
.getEmbeds(doc.path)
|
||||||
|
.slice(0, this.plugin.settings.maxEmbeds)
|
||||||
|
|
||||||
|
// Inject embeds in the results
|
||||||
|
for (const embed of embeds) {
|
||||||
|
total++
|
||||||
|
const newDoc = await this.plugin.documentsRepository.getDocument(embed)
|
||||||
|
documents.splice(i + 1, 0, newDoc)
|
||||||
|
results.splice(i + 1, 0, {
|
||||||
|
id: newDoc.path,
|
||||||
|
score: 0,
|
||||||
|
terms: [],
|
||||||
|
queryTerms: [],
|
||||||
|
match: {},
|
||||||
|
isEmbed: true,
|
||||||
|
})
|
||||||
|
i++ // Increment i to skip the newly inserted document
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map the raw results to get usable suggestions
|
||||||
|
const resultNotes = results.map(result => {
|
||||||
|
logVerbose('Locating matches for', result.id)
|
||||||
|
let note = documents.find(d => d.path === result.id)
|
||||||
|
if (!note) {
|
||||||
|
// throw new Error(`Locator - Note "${result.id}" not indexed`)
|
||||||
|
console.warn(`Locator - Note "${result.id}" not in the live cache`)
|
||||||
|
note = {
|
||||||
|
content: '',
|
||||||
|
basename: result.id,
|
||||||
|
path: result.id,
|
||||||
|
} as IndexedDocument
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean search matches that match quoted expressions,
|
||||||
|
// and inject those expressions instead
|
||||||
|
const foundWords = [
|
||||||
|
// Matching terms from the result,
|
||||||
|
// do not necessarily match the query
|
||||||
|
...result.terms,
|
||||||
|
|
||||||
|
// Quoted expressions
|
||||||
|
...query.getExactTerms(),
|
||||||
|
|
||||||
|
// Tags, starting with #
|
||||||
|
...query.getTags(),
|
||||||
|
]
|
||||||
|
logVerbose('Matching tokens:', foundWords)
|
||||||
|
|
||||||
|
logVerbose('Getting matches locations...')
|
||||||
|
const matches = this.plugin.textProcessor.getMatches(
|
||||||
|
note.content,
|
||||||
|
foundWords,
|
||||||
|
query
|
||||||
|
)
|
||||||
|
logVerbose(`Matches for note "${note.path}"`, matches)
|
||||||
|
const resultNote: ResultNote = {
|
||||||
|
score: result.score,
|
||||||
|
foundWords,
|
||||||
|
matches,
|
||||||
|
isEmbed: result.isEmbed,
|
||||||
|
...note,
|
||||||
|
}
|
||||||
|
return resultNote
|
||||||
|
})
|
||||||
|
|
||||||
|
logVerbose('Suggestions:', resultNotes)
|
||||||
|
|
||||||
|
return resultNotes
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* For cache saving
|
||||||
|
*/
|
||||||
|
public getSerializedMiniSearch(): AsPlainObject {
|
||||||
|
return this.minisearch.toJSON()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* For cache saving
|
||||||
|
*/
|
||||||
|
public getSerializedIndexedDocuments(): { path: string; mtime: number }[] {
|
||||||
|
return Array.from(this.indexedDocuments).map(([path, mtime]) => ({
|
||||||
|
path,
|
||||||
|
mtime,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
private getOptions(): Options<IndexedDocument> {
|
||||||
|
return {
|
||||||
|
tokenize: this.tokenizer.tokenizeForIndexing.bind(this.tokenizer),
|
||||||
|
extractField: (doc, fieldName) => {
|
||||||
|
if (fieldName === 'directory') {
|
||||||
|
// return path without the filename
|
||||||
|
const parts = doc.path.split('/')
|
||||||
|
parts.pop()
|
||||||
|
return parts.join('/')
|
||||||
|
}
|
||||||
|
return (doc as any)[fieldName]
|
||||||
|
},
|
||||||
|
processTerm: (term: string) =>
|
||||||
|
(this.plugin.settings.ignoreDiacritics
|
||||||
|
? removeDiacritics(term, this.plugin.settings.ignoreArabicDiacritics)
|
||||||
|
: term
|
||||||
|
).toLowerCase(),
|
||||||
|
idField: 'path',
|
||||||
|
fields: [
|
||||||
|
'basename',
|
||||||
|
// Different from `path`, since `path` is the unique index and needs to include the filename
|
||||||
|
'directory',
|
||||||
|
'aliases',
|
||||||
|
'content',
|
||||||
|
'headings1',
|
||||||
|
'headings2',
|
||||||
|
'headings3',
|
||||||
|
],
|
||||||
|
storeFields: ['tags', 'mtime'],
|
||||||
|
logger(_level, _message, code) {
|
||||||
|
if (code === 'version_conflict') {
|
||||||
|
new Notice(
|
||||||
|
'Locator - Your index cache may be incorrect or corrupted. If this message keeps appearing, go to Settings to clear the cache.',
|
||||||
|
5000
|
||||||
|
)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
38
src/search/search-history.ts
Normal file
38
src/search/search-history.ts
Normal file
|
@ -0,0 +1,38 @@
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
|
||||||
|
export class SearchHistory {
|
||||||
|
/**
|
||||||
|
* Show an empty input field next time the user opens Locator modal
|
||||||
|
*/
|
||||||
|
private nextQueryIsEmpty = false
|
||||||
|
|
||||||
|
constructor(private plugin: LocatorPlugin) {}
|
||||||
|
|
||||||
|
public async addToHistory(query: string): Promise<void> {
|
||||||
|
if (!query) {
|
||||||
|
this.nextQueryIsEmpty = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
this.nextQueryIsEmpty = false
|
||||||
|
const database = this.plugin.database
|
||||||
|
let history = await database.searchHistory.toArray()
|
||||||
|
history = history.filter(s => s.query !== query).reverse()
|
||||||
|
history.unshift({ query })
|
||||||
|
history = history.slice(0, 10)
|
||||||
|
await database.searchHistory.clear()
|
||||||
|
await database.searchHistory.bulkAdd(history)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @returns The search history, in reverse chronological order
|
||||||
|
*/
|
||||||
|
public async getHistory(): Promise<ReadonlyArray<string>> {
|
||||||
|
const data = (await this.plugin.database.searchHistory.toArray())
|
||||||
|
.reverse()
|
||||||
|
.map(o => o.query)
|
||||||
|
if (this.nextQueryIsEmpty) {
|
||||||
|
data.unshift('')
|
||||||
|
}
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
}
|
97
src/search/tokenizer.ts
Normal file
97
src/search/tokenizer.ts
Normal file
|
@ -0,0 +1,97 @@
|
||||||
|
import type { QueryCombination } from 'minisearch'
|
||||||
|
import { BRACKETS_AND_SPACE, chsRegex, SPACE_OR_PUNCTUATION } from '../globals'
|
||||||
|
import { logVerbose, splitCamelCase, splitHyphens } from '../tools/utils'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
|
||||||
|
const markdownLinkExtractor = require('markdown-link-extractor')
|
||||||
|
|
||||||
|
export class Tokenizer {
|
||||||
|
constructor(private plugin: LocatorPlugin) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tokenization for indexing will possibly return more tokens than the original text.
|
||||||
|
* This is because we combine different methods of tokenization to get the best results.
|
||||||
|
* @param text
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public tokenizeForIndexing(text: string): string[] {
|
||||||
|
try {
|
||||||
|
const words = this.tokenizeWords(text)
|
||||||
|
let urls: string[] = []
|
||||||
|
if (this.plugin.settings.tokenizeUrls) {
|
||||||
|
try {
|
||||||
|
urls = markdownLinkExtractor(text)
|
||||||
|
} catch (e) {
|
||||||
|
logVerbose('Error extracting urls', e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let tokens = this.tokenizeTokens(text, { skipChs: true })
|
||||||
|
tokens = [...tokens.flatMap(token => [
|
||||||
|
token,
|
||||||
|
...splitHyphens(token),
|
||||||
|
...splitCamelCase(token),
|
||||||
|
]), ...words]
|
||||||
|
|
||||||
|
// Add urls
|
||||||
|
if (urls.length) {
|
||||||
|
tokens = [...tokens, ...urls]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove duplicates
|
||||||
|
tokens = [...new Set(tokens)]
|
||||||
|
|
||||||
|
return tokens
|
||||||
|
} catch (e) {
|
||||||
|
console.error('Error tokenizing text, skipping document', e)
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Search tokenization will use the same tokenization methods as indexing,
|
||||||
|
* but will combine each group with "OR" operators
|
||||||
|
* @param text
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public tokenizeForSearch(text: string): QueryCombination {
|
||||||
|
// Extract urls and remove them from the query
|
||||||
|
const urls: string[] = markdownLinkExtractor(text)
|
||||||
|
text = urls.reduce((acc, url) => acc.replace(url, ''), text)
|
||||||
|
|
||||||
|
const tokens = [...this.tokenizeTokens(text), ...urls].filter(Boolean)
|
||||||
|
|
||||||
|
return {
|
||||||
|
combineWith: 'OR',
|
||||||
|
queries: [
|
||||||
|
{ combineWith: 'AND', queries: tokens },
|
||||||
|
{
|
||||||
|
combineWith: 'AND',
|
||||||
|
queries: this.tokenizeWords(text).filter(Boolean),
|
||||||
|
},
|
||||||
|
{ combineWith: 'AND', queries: tokens.flatMap(splitHyphens) },
|
||||||
|
{ combineWith: 'AND', queries: tokens.flatMap(splitCamelCase) },
|
||||||
|
],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private tokenizeWords(text: string, { skipChs = false } = {}): string[] {
|
||||||
|
const tokens = text.split(BRACKETS_AND_SPACE)
|
||||||
|
if (skipChs) return tokens
|
||||||
|
return this.tokenizeChsWord(tokens)
|
||||||
|
}
|
||||||
|
|
||||||
|
private tokenizeTokens(text: string, { skipChs = false } = {}): string[] {
|
||||||
|
const tokens = text.split(SPACE_OR_PUNCTUATION)
|
||||||
|
if (skipChs) return tokens
|
||||||
|
return this.tokenizeChsWord(tokens)
|
||||||
|
}
|
||||||
|
|
||||||
|
private tokenizeChsWord(tokens: string[]): string[] {
|
||||||
|
const segmenter = this.plugin.getChsSegmenter()
|
||||||
|
if (!segmenter) return tokens
|
||||||
|
return tokens.flatMap(word =>
|
||||||
|
chsRegex.test(word) ? segmenter.cut(word, { search: true }) : [word]
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
170
src/settings/index.ts
Normal file
170
src/settings/index.ts
Normal file
|
@ -0,0 +1,170 @@
|
||||||
|
// noinspection CssUnresolvedCustomProperty
|
||||||
|
import {
|
||||||
|
App,
|
||||||
|
Plugin,
|
||||||
|
PluginSettingTab,
|
||||||
|
Setting,
|
||||||
|
} from 'obsidian'
|
||||||
|
import { writable } from 'svelte/store'
|
||||||
|
import { K_DISABLE_OMNISEARCH, RecencyCutoff } from '../globals'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import { enableVerboseLogging } from '../tools/utils'
|
||||||
|
import { injectSettingsIndexing } from './settings-indexing'
|
||||||
|
import { type LocatorSettings, saveSettings } from './utils'
|
||||||
|
import { injectSettingsBehavior } from './settings-behavior'
|
||||||
|
import { injectSettingsUserInterface } from './settings-ui'
|
||||||
|
import { injectSettingsWeighting } from './settings-weighting'
|
||||||
|
import { injectSettingsHttp } from './settings-http'
|
||||||
|
import { injectSettingsDanger } from './settings-danger'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A store to reactively toggle the `showExcerpt` setting on the fly
|
||||||
|
*/
|
||||||
|
export const showExcerpt = writable(false)
|
||||||
|
|
||||||
|
export class SettingsTab extends PluginSettingTab {
|
||||||
|
plugin: LocatorPlugin
|
||||||
|
|
||||||
|
constructor(plugin: LocatorPlugin) {
|
||||||
|
super(plugin.app, plugin)
|
||||||
|
this.plugin = plugin
|
||||||
|
|
||||||
|
showExcerpt.subscribe(async v => {
|
||||||
|
settings.showExcerpt = v
|
||||||
|
await saveSettings(this.plugin)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
display(): void {
|
||||||
|
const { containerEl } = this
|
||||||
|
const database = this.plugin.database
|
||||||
|
|
||||||
|
containerEl.empty()
|
||||||
|
|
||||||
|
if (this.app.loadLocalStorage(K_DISABLE_OMNISEARCH) == '1') {
|
||||||
|
const span = containerEl.createEl('span')
|
||||||
|
span.innerHTML = `<strong style="color: var(--text-accent)">⚠️ OMNISEARCH IS DISABLED ⚠️</strong>`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Settings main title
|
||||||
|
containerEl.createEl('h1', { text: 'Locator' })
|
||||||
|
|
||||||
|
// Sponsor link - Thank you!
|
||||||
|
const divSponsor = containerEl.createDiv()
|
||||||
|
divSponsor.innerHTML = `
|
||||||
|
<iframe sandbox="allow-top-navigation-by-user-activation" src="https://github.com/sponsors/scambier/button" title="Sponsor scambier" height="35" width="116" style="border: 0;"></iframe>
|
||||||
|
<a href='https://ko-fi.com/B0B6LQ2C' target='_blank'><img height='36' style='border:0px;height:36px;' src='https://cdn.ko-fi.com/cdn/kofi2.png?v=3' border='0' alt='Buy Me a Coffee at ko-fi.com' /></a>
|
||||||
|
`
|
||||||
|
|
||||||
|
injectSettingsIndexing(this.plugin, settings, containerEl)
|
||||||
|
containerEl.createEl('hr')
|
||||||
|
injectSettingsBehavior(this.plugin, settings, containerEl)
|
||||||
|
containerEl.createEl('hr')
|
||||||
|
injectSettingsUserInterface(this.plugin, settings, containerEl)
|
||||||
|
containerEl.createEl('hr')
|
||||||
|
injectSettingsWeighting(this.plugin, settings, containerEl, this.display)
|
||||||
|
containerEl.createEl('hr')
|
||||||
|
injectSettingsHttp(this.plugin, settings, containerEl)
|
||||||
|
containerEl.createEl('hr')
|
||||||
|
injectSettingsDanger(this.plugin, settings, containerEl)
|
||||||
|
containerEl.createEl('hr')
|
||||||
|
|
||||||
|
//#region Debugging
|
||||||
|
|
||||||
|
new Setting(containerEl).setName('Debugging').setHeading()
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Enable verbose logging')
|
||||||
|
.setDesc(
|
||||||
|
'Adds a LOT of logs for debugging purposes. You also need to enable "Verbose" logging in the console to see these logs.'
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.verboseLogging).onChange(async v => {
|
||||||
|
settings.verboseLogging = v
|
||||||
|
enableVerboseLogging(v)
|
||||||
|
await saveSettings(this.plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
//#endregion Debugging
|
||||||
|
|
||||||
|
//#region Danger Zone
|
||||||
|
|
||||||
|
//#endregion Danger Zone
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getDefaultSettings(app: App): LocatorSettings {
|
||||||
|
return {
|
||||||
|
useCache: true,
|
||||||
|
hideExcluded: false,
|
||||||
|
recencyBoost: RecencyCutoff.Disabled,
|
||||||
|
downrankedFoldersFilters: [] as string[],
|
||||||
|
ignoreDiacritics: true,
|
||||||
|
ignoreArabicDiacritics: false,
|
||||||
|
indexedFileTypes: [] as string[],
|
||||||
|
displayTitle: '',
|
||||||
|
PDFIndexing: false,
|
||||||
|
officeIndexing: false,
|
||||||
|
imagesIndexing: false,
|
||||||
|
aiImageIndexing: false,
|
||||||
|
unsupportedFilesIndexing: 'default',
|
||||||
|
splitCamelCase: false,
|
||||||
|
openInNewPane: false,
|
||||||
|
vimLikeNavigationShortcut: app.vault.getConfig('vimMode') as boolean,
|
||||||
|
|
||||||
|
ribbonIcon: true,
|
||||||
|
showExcerpt: true,
|
||||||
|
maxEmbeds: 5,
|
||||||
|
renderLineReturnInExcerpts: true,
|
||||||
|
showCreateButton: false,
|
||||||
|
highlight: true,
|
||||||
|
showPreviousQueryResults: true,
|
||||||
|
simpleSearch: false,
|
||||||
|
tokenizeUrls: false,
|
||||||
|
fuzziness: '1',
|
||||||
|
|
||||||
|
weightBasename: 10,
|
||||||
|
weightDirectory: 7,
|
||||||
|
weightH1: 6,
|
||||||
|
weightH2: 5,
|
||||||
|
weightH3: 4,
|
||||||
|
weightUnmarkedTags: 2,
|
||||||
|
weightCustomProperties: [] as { name: string; weight: number }[],
|
||||||
|
|
||||||
|
httpApiEnabled: false,
|
||||||
|
httpApiPort: '51361',
|
||||||
|
httpApiNotice: true,
|
||||||
|
|
||||||
|
welcomeMessage: '',
|
||||||
|
verboseLogging: false,
|
||||||
|
|
||||||
|
DANGER_httpHost: null,
|
||||||
|
DANGER_forceSaveCache: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export let settings: LocatorSettings
|
||||||
|
|
||||||
|
// /**
|
||||||
|
// * @deprecated
|
||||||
|
// */
|
||||||
|
// export function getSettings(): LocatorSettings {
|
||||||
|
// if (!settings) {
|
||||||
|
// settings = Object.assign({}, getDefaultSettings()) as LocatorSettings
|
||||||
|
// }
|
||||||
|
// return settings
|
||||||
|
// }
|
||||||
|
|
||||||
|
export async function loadSettings(
|
||||||
|
plugin: Plugin
|
||||||
|
): Promise<LocatorSettings> {
|
||||||
|
settings = Object.assign(
|
||||||
|
{},
|
||||||
|
getDefaultSettings(plugin.app),
|
||||||
|
await plugin.loadData()
|
||||||
|
)
|
||||||
|
showExcerpt.set(settings.showExcerpt)
|
||||||
|
enableVerboseLogging(settings.verboseLogging)
|
||||||
|
return settings
|
||||||
|
}
|
166
src/settings/settings-behavior.ts
Normal file
166
src/settings/settings-behavior.ts
Normal file
|
@ -0,0 +1,166 @@
|
||||||
|
import { Platform, Setting } from 'obsidian'
|
||||||
|
import type { LocatorSettings } from './utils'
|
||||||
|
import { saveSettings } from './utils'
|
||||||
|
import { htmlDescription, needsARestart } from './utils'
|
||||||
|
import type LocatorPlugin from 'src/main'
|
||||||
|
import { getCtrlKeyLabel } from 'src/tools/utils'
|
||||||
|
|
||||||
|
export function injectSettingsBehavior(
|
||||||
|
plugin: LocatorPlugin,
|
||||||
|
settings: LocatorSettings,
|
||||||
|
containerEl: HTMLElement
|
||||||
|
) {
|
||||||
|
const database = plugin.database
|
||||||
|
|
||||||
|
new Setting(containerEl).setName('Behavior').setHeading()
|
||||||
|
|
||||||
|
// Caching
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Save index to cache')
|
||||||
|
.setDesc(
|
||||||
|
'Enable caching to speed up indexing time. In rare cases, the cache write may cause a crash in Obsidian. This option will disable itself if it happens.'
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.useCache).onChange(async v => {
|
||||||
|
settings.useCache = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Show previous query results
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Show previous query results')
|
||||||
|
.setDesc('Re-executes the previous query when opening Locator.')
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.showPreviousQueryResults).onChange(async v => {
|
||||||
|
settings.showPreviousQueryResults = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Respect excluded files
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Respect Obsidian\'s "Excluded Files"')
|
||||||
|
.setDesc(
|
||||||
|
`By default, files that are in Obsidian\'s "Options > Files & Links > Excluded Files" list are downranked in results.
|
||||||
|
Enable this option to completely hide them.`
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.hideExcluded).onChange(async v => {
|
||||||
|
settings.hideExcluded = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Downranked files
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Folders to downrank in search results')
|
||||||
|
.setDesc(
|
||||||
|
`Folders to downrank in search results. Files in these folders will be downranked in results. They will still be indexed for tags, unlike excluded files. Folders should be comma delimited.`
|
||||||
|
)
|
||||||
|
.addText(component => {
|
||||||
|
component
|
||||||
|
.setValue(settings.downrankedFoldersFilters.join(','))
|
||||||
|
.setPlaceholder('Example: src,p2/dir')
|
||||||
|
.onChange(async v => {
|
||||||
|
let folders = v.split(',')
|
||||||
|
folders = folders.map(f => f.trim())
|
||||||
|
settings.downrankedFoldersFilters = folders
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// Split CamelCaseWords
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Split CamelCaseWords')
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`Enable this if you want to be able to search for CamelCaseWords as separate words.<br/>
|
||||||
|
⚠️ <span style="color: var(--text-accent)">Changing this setting will clear the cache.</span><br>
|
||||||
|
${needsARestart}`)
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.splitCamelCase).onChange(async v => {
|
||||||
|
await database.clearCache()
|
||||||
|
settings.splitCamelCase = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Simpler search
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Simpler search')
|
||||||
|
.setDesc(
|
||||||
|
`Enable this if Obsidian often freezes while making searches.
|
||||||
|
Words shorter than 3 characters won't be used as prefixes; this can reduce search delay but will return fewer results.`
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.simpleSearch).onChange(async v => {
|
||||||
|
settings.simpleSearch = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Extract URLs
|
||||||
|
// Crashes on iOS
|
||||||
|
if (!Platform.isIosApp) {
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Tokenize URLs')
|
||||||
|
.setDesc(
|
||||||
|
`Enable this if you want to be able to search for URLs as separate words.
|
||||||
|
This setting has a strong impact on indexing performance, and can crash Obsidian under certain conditions.`
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.tokenizeUrls).onChange(async v => {
|
||||||
|
settings.tokenizeUrls = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open in new pane
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Open in new pane')
|
||||||
|
.setDesc('Open and create files in a new pane instead of the current pane.')
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.openInNewPane).onChange(async v => {
|
||||||
|
settings.openInNewPane = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Set Vim like navigation keys
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Set Vim like navigation keys')
|
||||||
|
.setDesc(
|
||||||
|
`Navigate down the results with ${getCtrlKeyLabel()} + J/N, or navigate up with ${getCtrlKeyLabel()} + K/P.`
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.vimLikeNavigationShortcut).onChange(async v => {
|
||||||
|
settings.vimLikeNavigationShortcut = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Fuzziness
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Fuzziness')
|
||||||
|
.setDesc(
|
||||||
|
"Define the level of fuzziness for the search. The higher the fuzziness, the more results you'll get."
|
||||||
|
)
|
||||||
|
.addDropdown(dropdown =>
|
||||||
|
dropdown
|
||||||
|
.addOptions({
|
||||||
|
0: 'Exact match',
|
||||||
|
1: 'Not too fuzzy',
|
||||||
|
2: 'Fuzzy enough',
|
||||||
|
})
|
||||||
|
.setValue(settings.fuzziness)
|
||||||
|
.onChange(async v => {
|
||||||
|
if (!['0', '1', '2'].includes(v)) {
|
||||||
|
v = '2'
|
||||||
|
}
|
||||||
|
settings.fuzziness = v as '0' | '1' | '2'
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
97
src/settings/settings-danger.ts
Normal file
97
src/settings/settings-danger.ts
Normal file
|
@ -0,0 +1,97 @@
|
||||||
|
import { Notice, Setting } from 'obsidian'
|
||||||
|
import type { LocatorSettings } from './utils'
|
||||||
|
import { isCacheEnabled } from './utils'
|
||||||
|
import { saveSettings } from './utils'
|
||||||
|
import { htmlDescription, isPluginDisabled, needsARestart } from './utils'
|
||||||
|
import type LocatorPlugin from 'src/main'
|
||||||
|
import { K_DISABLE_OMNISEARCH } from 'src/globals'
|
||||||
|
|
||||||
|
export function injectSettingsDanger(
|
||||||
|
plugin: LocatorPlugin,
|
||||||
|
settings: LocatorSettings,
|
||||||
|
containerEl: HTMLElement
|
||||||
|
) {
|
||||||
|
const database = plugin.database
|
||||||
|
|
||||||
|
new Setting(containerEl).setName('Danger Zone').setHeading()
|
||||||
|
|
||||||
|
// Ignore diacritics
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Ignore diacritics')
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`Normalize diacritics in search terms. Words like "brûlée" or "žluťoučký" will be indexed as "brulee" and "zlutoucky".<br/>
|
||||||
|
⚠️ <span style="color: var(--text-accent)">You probably should <strong>NOT</strong> disable this.</span><br>
|
||||||
|
⚠️ <span style="color: var(--text-accent)">Changing this setting will clear the cache.</span><br>
|
||||||
|
${needsARestart}`)
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.ignoreDiacritics).onChange(async v => {
|
||||||
|
await database.clearCache()
|
||||||
|
settings.ignoreDiacritics = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Ignore Arabic diacritics (beta)')
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.ignoreArabicDiacritics).onChange(async v => {
|
||||||
|
await database.clearCache()
|
||||||
|
settings.ignoreArabicDiacritics = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Disable Locator
|
||||||
|
const disableDesc = new DocumentFragment()
|
||||||
|
disableDesc.createSpan({}, span => {
|
||||||
|
span.innerHTML = `Disable Locator on this device only.<br>
|
||||||
|
${needsARestart}`
|
||||||
|
})
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Disable on this device')
|
||||||
|
.setDesc(disableDesc)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(isPluginDisabled(plugin.app)).onChange(async v => {
|
||||||
|
if (v) {
|
||||||
|
plugin.app.saveLocalStorage(K_DISABLE_OMNISEARCH, '1')
|
||||||
|
new Notice('Locator - Disabled. Please restart Obsidian.')
|
||||||
|
} else {
|
||||||
|
plugin.app.saveLocalStorage(K_DISABLE_OMNISEARCH) // No value = unset
|
||||||
|
new Notice('Locator - Enabled. Please restart Obsidian.')
|
||||||
|
}
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Force save cache
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Force save the cache')
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`Locator has a security feature that automatically disables cache writing if it cannot fully perform the operation.<br>
|
||||||
|
Use this option to force the cache to be saved, even if it causes a crash.<br>
|
||||||
|
⚠️ <span style="color: var(--text-accent)">Enabling this setting could lead to crash loops</span>`)
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.DANGER_forceSaveCache).onChange(async v => {
|
||||||
|
settings.DANGER_forceSaveCache = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Clear cache data
|
||||||
|
if (isCacheEnabled()) {
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Clear cache data')
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`Erase all Locator cache data.
|
||||||
|
Use this if Locator results are inconsistent, missing, or appear outdated.<br>
|
||||||
|
${needsARestart}`)
|
||||||
|
)
|
||||||
|
.addButton(btn => {
|
||||||
|
btn.setButtonText('Clear cache')
|
||||||
|
btn.onClick(async () => {
|
||||||
|
await database.clearCache()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
66
src/settings/settings-http.ts
Normal file
66
src/settings/settings-http.ts
Normal file
|
@ -0,0 +1,66 @@
|
||||||
|
import { Platform, Setting } from 'obsidian'
|
||||||
|
import type { LocatorSettings } from './utils'
|
||||||
|
import { saveSettings } from './utils'
|
||||||
|
import { htmlDescription } from './utils'
|
||||||
|
import type LocatorPlugin from 'src/main'
|
||||||
|
|
||||||
|
export function injectSettingsHttp(
|
||||||
|
plugin: LocatorPlugin,
|
||||||
|
settings: LocatorSettings,
|
||||||
|
containerEl: HTMLElement
|
||||||
|
) {
|
||||||
|
if (!Platform.isMobile) {
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('API Access Through HTTP')
|
||||||
|
.setHeading()
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(
|
||||||
|
`Locator can be used through a simple HTTP server (<a href="https://publish.obsidian.md/locator/Public+API+%26+URL+Scheme#HTTP+Server">more information</a>).`
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Enable the HTTP server')
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.httpApiEnabled).onChange(async v => {
|
||||||
|
settings.httpApiEnabled = v
|
||||||
|
if (v) {
|
||||||
|
plugin.apiHttpServer.listen(settings.httpApiPort)
|
||||||
|
} else {
|
||||||
|
plugin.apiHttpServer.close()
|
||||||
|
}
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
new Setting(containerEl).setName('HTTP Port').addText(component => {
|
||||||
|
component
|
||||||
|
.setValue(settings.httpApiPort)
|
||||||
|
.setPlaceholder('51361')
|
||||||
|
.onChange(async v => {
|
||||||
|
if (parseInt(v) > 65535) {
|
||||||
|
v = settings.httpApiPort
|
||||||
|
component.setValue(settings.httpApiPort)
|
||||||
|
}
|
||||||
|
settings.httpApiPort = v
|
||||||
|
if (settings.httpApiEnabled) {
|
||||||
|
plugin.apiHttpServer.close()
|
||||||
|
plugin.apiHttpServer.listen(settings.httpApiPort)
|
||||||
|
}
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Show a notification when the server starts')
|
||||||
|
.setDesc(
|
||||||
|
'Will display a notification if the server is enabled, at Obsidian startup.'
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.httpApiNotice).onChange(async v => {
|
||||||
|
settings.httpApiNotice = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
161
src/settings/settings-indexing.ts
Normal file
161
src/settings/settings-indexing.ts
Normal file
|
@ -0,0 +1,161 @@
|
||||||
|
import { Setting } from 'obsidian'
|
||||||
|
import type { LocatorSettings } from './utils'
|
||||||
|
import { saveSettings } from './utils'
|
||||||
|
import { htmlDescription } from './utils'
|
||||||
|
import type LocatorPlugin from 'src/main'
|
||||||
|
import { debounce } from 'lodash-es'
|
||||||
|
|
||||||
|
export function injectSettingsIndexing(
|
||||||
|
plugin: LocatorPlugin,
|
||||||
|
settings: LocatorSettings,
|
||||||
|
containerEl: HTMLElement
|
||||||
|
) {
|
||||||
|
const textExtractor = plugin.getTextExtractor()
|
||||||
|
const aiImageAnalyzer = plugin.getAIImageAnalyzer()
|
||||||
|
const database = plugin.database
|
||||||
|
|
||||||
|
const clearCacheDebounced = debounce(async () => {
|
||||||
|
await database.clearCache()
|
||||||
|
}, 1000)
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Indexing')
|
||||||
|
.setHeading()
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`⚠️ <span style="color: var(--text-accent)">Changing indexing settings will clear the cache, and requires a restart of Obsidian.</span><br/><br/>
|
||||||
|
${
|
||||||
|
textExtractor
|
||||||
|
? `👍 You have installed <a href="https://github.com/scambier/obsidian-text-extractor">Text Extractor</a>, Locator can use it to index PDFs and images contents.
|
||||||
|
<br />Text extraction only works on desktop, but the cache can be synchronized with your mobile device.`
|
||||||
|
: `⚠️ Locator requires <a href="https://github.com/scambier/obsidian-text-extractor">Text Extractor</a> to index PDFs and images.`
|
||||||
|
}
|
||||||
|
${
|
||||||
|
aiImageAnalyzer
|
||||||
|
? `<br/>👍 You have installed <a href="https://github.com/Swaggeroo/obsidian-ai-image-analyzer">AI Image Analyzer</a>, Locator can use it to index images contents with ai.`
|
||||||
|
: `<br/>⚠️ Locator requires <a href="https://github.com/Swaggeroo/obsidian-ai-image-analyzer">AI Image Analyzer</a> to index images with ai.`
|
||||||
|
}`)
|
||||||
|
)
|
||||||
|
|
||||||
|
// PDF Indexing
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(`PDFs content indexing ${textExtractor ? '' : '⚠️ Disabled'}`)
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(
|
||||||
|
`Locator will use Text Extractor to index the content of your PDFs.`
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.PDFIndexing).onChange(async v => {
|
||||||
|
await database.clearCache()
|
||||||
|
settings.PDFIndexing = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
.setDisabled(!textExtractor)
|
||||||
|
|
||||||
|
// Images Indexing
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(`Images OCR indexing ${textExtractor ? '' : '⚠️ Disabled'}`)
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(
|
||||||
|
`Locator will use Text Extractor to OCR your images and index their content.`
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.imagesIndexing).onChange(async v => {
|
||||||
|
await database.clearCache()
|
||||||
|
settings.imagesIndexing = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
.setDisabled(!textExtractor)
|
||||||
|
|
||||||
|
// Office Documents Indexing
|
||||||
|
const indexOfficesDesc = new DocumentFragment()
|
||||||
|
indexOfficesDesc.createSpan({}, span => {
|
||||||
|
span.innerHTML = `Locator will use Text Extractor to index the content of your office documents (currently <pre style="display:inline">.docx</pre> and <pre style="display:inline">.xlsx</pre>).`
|
||||||
|
})
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(`Documents content indexing ${textExtractor ? '' : '⚠️ Disabled'}`)
|
||||||
|
.setDesc(indexOfficesDesc)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.officeIndexing).onChange(async v => {
|
||||||
|
await database.clearCache()
|
||||||
|
settings.officeIndexing = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
.setDisabled(!textExtractor)
|
||||||
|
|
||||||
|
// AI Images Indexing
|
||||||
|
const aiIndexImagesDesc = new DocumentFragment()
|
||||||
|
aiIndexImagesDesc.createSpan({}, span => {
|
||||||
|
span.innerHTML = `Locator will use AI Image Analyzer to index the content of your images with ai.`
|
||||||
|
})
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(`Images AI indexing ${aiImageAnalyzer ? '' : '⚠️ Disabled'}`)
|
||||||
|
.setDesc(aiIndexImagesDesc)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.aiImageIndexing).onChange(async v => {
|
||||||
|
await database.clearCache()
|
||||||
|
settings.aiImageIndexing = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
.setDisabled(!aiImageAnalyzer)
|
||||||
|
|
||||||
|
// Index filenames of unsupported files
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Index paths of unsupported files')
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`
|
||||||
|
Locator can index file<strong>names</strong> of "unsupported" files, such as e.g. <pre style="display:inline">.mp4</pre>
|
||||||
|
or non-extracted PDFs & images.<br/>
|
||||||
|
"Obsidian setting" will respect the value of "Files & Links > Detect all file extensions".`)
|
||||||
|
)
|
||||||
|
.addDropdown(dropdown => {
|
||||||
|
dropdown
|
||||||
|
.addOptions({ yes: 'Yes', no: 'No', default: 'Obsidian setting' })
|
||||||
|
.setValue(settings.unsupportedFilesIndexing)
|
||||||
|
.onChange(async v => {
|
||||||
|
await clearCacheDebounced()
|
||||||
|
;(settings.unsupportedFilesIndexing as any) = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// Custom display title
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Set frontmatter property key as title')
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`If you have a custom property in your notes that you want to use as the title in search results. If you set this to '#heading', then use the first heading from a file as the title.<br>
|
||||||
|
Leave empty to disable.`)
|
||||||
|
)
|
||||||
|
.addText(component => {
|
||||||
|
component.setValue(settings.displayTitle).onChange(async v => {
|
||||||
|
await clearCacheDebounced()
|
||||||
|
settings.displayTitle = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// Additional text files to index
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Additional TEXT files to index')
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`In addition to standard <code>md</code> files, Locator can also index other <strong style="color: var(--text-accent)">PLAINTEXT</strong> files.<br/>
|
||||||
|
Add extensions separated by a space, without the dot. Example: "<code>txt org csv</code>".<br />
|
||||||
|
⚠️ <span style="color: var(--text-accent)">Using extensions of non-plaintext files (like .pptx) WILL cause crashes,
|
||||||
|
because Locator will try to index their content.</span>`)
|
||||||
|
)
|
||||||
|
.addText(component => {
|
||||||
|
component
|
||||||
|
.setValue(settings.indexedFileTypes.join(' '))
|
||||||
|
.setPlaceholder('Example: txt org csv')
|
||||||
|
.onChange(async v => {
|
||||||
|
await database.clearCache()
|
||||||
|
settings.indexedFileTypes = v.split(' ')
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
98
src/settings/settings-ui.ts
Normal file
98
src/settings/settings-ui.ts
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
import { Setting } from 'obsidian'
|
||||||
|
import type LocatorPlugin from 'src/main'
|
||||||
|
import { showExcerpt } from '.'
|
||||||
|
import type { LocatorSettings } from './utils'
|
||||||
|
import { saveSettings } from './utils'
|
||||||
|
import { htmlDescription } from './utils'
|
||||||
|
|
||||||
|
export function injectSettingsUserInterface(
|
||||||
|
plugin: LocatorPlugin,
|
||||||
|
settings: LocatorSettings,
|
||||||
|
containerEl: HTMLElement
|
||||||
|
) {
|
||||||
|
new Setting(containerEl).setName('User Interface').setHeading()
|
||||||
|
|
||||||
|
// Show Ribbon Icon
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Show ribbon button')
|
||||||
|
.setDesc('Add a button on the sidebar to open the Vault search modal.')
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.ribbonIcon).onChange(async v => {
|
||||||
|
settings.ribbonIcon = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
if (v) {
|
||||||
|
plugin.addRibbonButton()
|
||||||
|
} else {
|
||||||
|
plugin.removeRibbonButton()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Show context excerpt
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Show excerpts')
|
||||||
|
.setDesc(
|
||||||
|
'Shows the contextual part of the note that matches the search. Disable this to only show filenames in results.'
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.showExcerpt).onChange(async v => {
|
||||||
|
showExcerpt.set(v)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Show embeds
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Show embed references')
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`Some results are <a href="https://help.obsidian.md/Linking+notes+and+files/Embed+files">embedded</a> in other notes.<br>
|
||||||
|
This setting controls the maximum number of embeds to show in the search results. Set to 0 to disable.<br>
|
||||||
|
Also works with Text Extractor for embedded images and documents.`)
|
||||||
|
)
|
||||||
|
.addSlider(cb => {
|
||||||
|
cb.setLimits(0, 10, 1)
|
||||||
|
.setValue(settings.maxEmbeds)
|
||||||
|
.setDynamicTooltip()
|
||||||
|
.onChange(async v => {
|
||||||
|
settings.maxEmbeds = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// Keep line returns in excerpts
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Render line return in excerpts')
|
||||||
|
.setDesc('Activate this option to render line returns in result excerpts.')
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.renderLineReturnInExcerpts).onChange(async v => {
|
||||||
|
settings.renderLineReturnInExcerpts = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Show "Create note" button
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Show "Create note" button')
|
||||||
|
.setDesc(
|
||||||
|
htmlDescription(`Shows a button next to the search input, to create a note.
|
||||||
|
Acts the same as the <code>shift ↵</code> shortcut, can be useful for mobile device users.`)
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.showCreateButton).onChange(async v => {
|
||||||
|
settings.showCreateButton = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
// Highlight results
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Highlight matching words in results')
|
||||||
|
.setDesc(
|
||||||
|
'Will highlight matching results when enabled. See README for more customization options.'
|
||||||
|
)
|
||||||
|
.addToggle(toggle =>
|
||||||
|
toggle.setValue(settings.highlight).onChange(async v => {
|
||||||
|
settings.highlight = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
127
src/settings/settings-weighting.ts
Normal file
127
src/settings/settings-weighting.ts
Normal file
|
@ -0,0 +1,127 @@
|
||||||
|
import { Setting, SliderComponent } from 'obsidian'
|
||||||
|
import { getDefaultSettings } from 'src/settings'
|
||||||
|
import type { LocatorSettings } from './utils'
|
||||||
|
import { saveSettings } from './utils'
|
||||||
|
import type { WeightingSettings } from './utils'
|
||||||
|
import type LocatorPlugin from 'src/main'
|
||||||
|
import { RecencyCutoff } from 'src/globals'
|
||||||
|
|
||||||
|
export function injectSettingsWeighting(
|
||||||
|
plugin: LocatorPlugin,
|
||||||
|
settings: LocatorSettings,
|
||||||
|
containerEl: HTMLElement,
|
||||||
|
refreshDisplay: () => void
|
||||||
|
) {
|
||||||
|
function weightSlider(
|
||||||
|
cb: SliderComponent,
|
||||||
|
key: keyof WeightingSettings
|
||||||
|
): void {
|
||||||
|
cb.setLimits(1, 10, 0.5)
|
||||||
|
.setValue(settings[key])
|
||||||
|
.setDynamicTooltip()
|
||||||
|
.onChange(async v => {
|
||||||
|
settings[key] = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const defaultSettings = getDefaultSettings(plugin.app)
|
||||||
|
|
||||||
|
new Setting(containerEl).setName('Results weighting').setHeading()
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(
|
||||||
|
`File name & declared aliases (default: ${defaultSettings.weightBasename})`
|
||||||
|
)
|
||||||
|
.addSlider(cb => weightSlider(cb, 'weightBasename'))
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(`File directory (default: ${defaultSettings.weightDirectory})`)
|
||||||
|
.addSlider(cb => weightSlider(cb, 'weightDirectory'))
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(`Headings level 1 (default: ${defaultSettings.weightH1})`)
|
||||||
|
.addSlider(cb => weightSlider(cb, 'weightH1'))
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(`Headings level 2 (default: ${defaultSettings.weightH2})`)
|
||||||
|
.addSlider(cb => weightSlider(cb, 'weightH2'))
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(`Headings level 3 (default: ${defaultSettings.weightH3})`)
|
||||||
|
.addSlider(cb => weightSlider(cb, 'weightH3'))
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName(`Tags (default: ${defaultSettings.weightUnmarkedTags})`)
|
||||||
|
.addSlider(cb => weightSlider(cb, 'weightUnmarkedTags'))
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Header properties fields')
|
||||||
|
.setDesc(
|
||||||
|
'You can set custom weights for values of header properties (e.g. "keywords"). Weights under 1.0 will downrank the results.'
|
||||||
|
)
|
||||||
|
|
||||||
|
for (let i = 0; i < settings.weightCustomProperties.length; i++) {
|
||||||
|
const item = settings.weightCustomProperties[i]
|
||||||
|
const el = new Setting(containerEl).setName((i + 1).toString() + '.')
|
||||||
|
el.settingEl.style.paddingLeft = '2em'
|
||||||
|
|
||||||
|
// TODO: add autocompletion from app.metadataCache.getAllPropertyInfos()
|
||||||
|
el.addText(text => {
|
||||||
|
text
|
||||||
|
.setPlaceholder('Property name')
|
||||||
|
.setValue(item.name)
|
||||||
|
.onChange(async v => {
|
||||||
|
item.name = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.addSlider(cb => {
|
||||||
|
cb.setLimits(0.1, 5, 0.1)
|
||||||
|
.setValue(item.weight)
|
||||||
|
.setDynamicTooltip()
|
||||||
|
.onChange(async v => {
|
||||||
|
item.weight = v
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
// Remove the tag
|
||||||
|
.addButton(btn => {
|
||||||
|
btn.setButtonText('Remove')
|
||||||
|
btn.onClick(async () => {
|
||||||
|
settings.weightCustomProperties.splice(i, 1)
|
||||||
|
await saveSettings(plugin)
|
||||||
|
refreshDisplay()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add a new custom tag
|
||||||
|
new Setting(containerEl).addButton(btn => {
|
||||||
|
btn.setButtonText('Add a new property')
|
||||||
|
btn.onClick(_cb => {
|
||||||
|
settings.weightCustomProperties.push({ name: '', weight: 1 })
|
||||||
|
refreshDisplay()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
new Setting(containerEl)
|
||||||
|
.setName('Recency boost (experimental)')
|
||||||
|
.setDesc(
|
||||||
|
'Files that have been modified more recently than [selected cutoff] are given a higher rank.'
|
||||||
|
)
|
||||||
|
.addDropdown(dropdown =>
|
||||||
|
dropdown
|
||||||
|
.addOptions({
|
||||||
|
[RecencyCutoff.Disabled]: 'Disabled',
|
||||||
|
[RecencyCutoff.Day]: '24 hours',
|
||||||
|
[RecencyCutoff.Week]: '7 days',
|
||||||
|
[RecencyCutoff.Month]: '30 days',
|
||||||
|
})
|
||||||
|
.setValue(settings.recencyBoost)
|
||||||
|
.onChange(async v => {
|
||||||
|
settings.recencyBoost = v as RecencyCutoff
|
||||||
|
await saveSettings(plugin)
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
92
src/settings/utils.ts
Normal file
92
src/settings/utils.ts
Normal file
|
@ -0,0 +1,92 @@
|
||||||
|
import { App, Platform, Plugin } from 'obsidian'
|
||||||
|
import { K_DISABLE_OMNISEARCH, RecencyCutoff } from 'src/globals'
|
||||||
|
import { settings } from '.'
|
||||||
|
|
||||||
|
export function htmlDescription(innerHTML: string): DocumentFragment {
|
||||||
|
const desc = new DocumentFragment()
|
||||||
|
desc.createSpan({}, span => {
|
||||||
|
span.innerHTML = innerHTML
|
||||||
|
})
|
||||||
|
return desc
|
||||||
|
}
|
||||||
|
|
||||||
|
export const needsARestart = `<strong style="color: var(--text-accent)">Needs a restart to fully take effect.</strong>`
|
||||||
|
|
||||||
|
export interface WeightingSettings {
|
||||||
|
weightBasename: number
|
||||||
|
weightDirectory: number
|
||||||
|
weightH1: number
|
||||||
|
weightH2: number
|
||||||
|
weightH3: number
|
||||||
|
weightUnmarkedTags: number
|
||||||
|
}
|
||||||
|
export function isPluginDisabled(app: App): boolean {
|
||||||
|
return app.loadLocalStorage(K_DISABLE_OMNISEARCH) === '1'
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveSettings(plugin: Plugin): Promise<void> {
|
||||||
|
await plugin.saveData(settings)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isCacheEnabled(): boolean {
|
||||||
|
return !Platform.isIosApp && settings.useCache
|
||||||
|
}
|
||||||
|
export interface LocatorSettings extends WeightingSettings {
|
||||||
|
weightCustomProperties: { name: string; weight: number }[]
|
||||||
|
/** Enables caching to speed up indexing */
|
||||||
|
useCache: boolean
|
||||||
|
/** Respect the "excluded files" Obsidian setting by downranking results ignored files */
|
||||||
|
hideExcluded: boolean
|
||||||
|
/** Boost more recent files */
|
||||||
|
recencyBoost: RecencyCutoff
|
||||||
|
/** downrank files in the given folders */
|
||||||
|
downrankedFoldersFilters: string[]
|
||||||
|
/** Ignore diacritics when indexing files */
|
||||||
|
ignoreDiacritics: boolean
|
||||||
|
ignoreArabicDiacritics: boolean
|
||||||
|
|
||||||
|
/** Extensions of plain text files to index, in addition to .md */
|
||||||
|
indexedFileTypes: string[]
|
||||||
|
/** Custom title field */
|
||||||
|
displayTitle: string
|
||||||
|
/** Enable PDF indexing */
|
||||||
|
PDFIndexing: boolean
|
||||||
|
/** Enable Images indexing */
|
||||||
|
imagesIndexing: boolean
|
||||||
|
/** Enable Office documents indexing */
|
||||||
|
officeIndexing: boolean
|
||||||
|
/** Enable image ai indexing */
|
||||||
|
aiImageIndexing: boolean
|
||||||
|
|
||||||
|
/** Enable indexing of unknown files */
|
||||||
|
unsupportedFilesIndexing: 'yes' | 'no' | 'default'
|
||||||
|
/** Activate the small 🔍 button on Obsidian's ribbon */
|
||||||
|
ribbonIcon: boolean
|
||||||
|
/** Display the small contextual excerpt in search results */
|
||||||
|
showExcerpt: boolean
|
||||||
|
/** Number of embeds references to display in search results */
|
||||||
|
maxEmbeds: number
|
||||||
|
/** Render line returns with <br> in excerpts */
|
||||||
|
renderLineReturnInExcerpts: boolean
|
||||||
|
/** Enable a "create note" button in the Vault Search modal */
|
||||||
|
showCreateButton: boolean
|
||||||
|
/** Re-execute the last query when opening Locator */
|
||||||
|
showPreviousQueryResults: boolean
|
||||||
|
/** Key for the welcome message when Obsidian is updated. A message is only shown once. */
|
||||||
|
welcomeMessage: string
|
||||||
|
/** If a query returns 0 result, try again with more relax conditions */
|
||||||
|
simpleSearch: boolean
|
||||||
|
tokenizeUrls: boolean
|
||||||
|
highlight: boolean
|
||||||
|
splitCamelCase: boolean
|
||||||
|
openInNewPane: boolean
|
||||||
|
verboseLogging: boolean
|
||||||
|
vimLikeNavigationShortcut: boolean
|
||||||
|
fuzziness: '0' | '1' | '2'
|
||||||
|
httpApiEnabled: boolean
|
||||||
|
httpApiPort: string
|
||||||
|
httpApiNotice: boolean
|
||||||
|
|
||||||
|
DANGER_httpHost: string | null
|
||||||
|
DANGER_forceSaveCache: boolean
|
||||||
|
}
|
78
src/tools/api-server.ts
Normal file
78
src/tools/api-server.ts
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
import * as http from 'http'
|
||||||
|
import * as url from 'url'
|
||||||
|
import { Notice } from 'obsidian'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import { getApi } from './api'
|
||||||
|
|
||||||
|
export function getServer(plugin: LocatorPlugin) {
|
||||||
|
const api = getApi(plugin)
|
||||||
|
const server = http.createServer(async function (req, res) {
|
||||||
|
res.setHeader('Access-Control-Allow-Origin', '*')
|
||||||
|
res.setHeader(
|
||||||
|
'Access-Control-Allow-Methods',
|
||||||
|
'GET, HEAD, POST, OPTIONS, PUT, PATCH, DELETE'
|
||||||
|
)
|
||||||
|
res.setHeader(
|
||||||
|
'Access-Control-Allow-Headers',
|
||||||
|
'Access-Control-Allow-Headers, Origin, Authorization,Accept,x-client-id, X-Requested-With, Content-Type, Access-Control-Request-Method, Access-Control-Request-Headers, hypothesis-client-version'
|
||||||
|
)
|
||||||
|
res.setHeader('Access-Control-Allow-Credentials', 'true')
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (req.url) {
|
||||||
|
// parse URL
|
||||||
|
const parsedUrl = url.parse(req.url, true)
|
||||||
|
if (parsedUrl.pathname === '/search') {
|
||||||
|
const q = parsedUrl.query.q as string
|
||||||
|
const results = await api.search(q)
|
||||||
|
res.statusCode = 200
|
||||||
|
res.setHeader('Content-Type', 'application/json')
|
||||||
|
res.end(JSON.stringify(results))
|
||||||
|
} else {
|
||||||
|
res.end()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
res.statusCode = 500
|
||||||
|
res.end(e)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return {
|
||||||
|
listen(port: string) {
|
||||||
|
console.debug(`Locator - Starting HTTP server on port ${port}`)
|
||||||
|
server.listen(
|
||||||
|
{
|
||||||
|
port: parseInt(port),
|
||||||
|
host: plugin.settings.DANGER_httpHost ?? 'localhost',
|
||||||
|
},
|
||||||
|
() => {
|
||||||
|
console.log(`Locator - Started HTTP server on port ${port}`)
|
||||||
|
if (plugin.settings.DANGER_httpHost && plugin.settings.DANGER_httpHost !== 'localhost') {
|
||||||
|
new Notice(`Locator - Started non-localhost HTTP server at ${plugin.settings.DANGER_httpHost}:${port}`, 120_000)
|
||||||
|
}
|
||||||
|
else if (plugin.settings.httpApiNotice) {
|
||||||
|
new Notice(`Locator - Started HTTP server on port ${port}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
server.on('error', e => {
|
||||||
|
console.error(e)
|
||||||
|
new Notice(
|
||||||
|
`Locator - Cannot start HTTP server on ${port}. See console for more details.`
|
||||||
|
)
|
||||||
|
})
|
||||||
|
},
|
||||||
|
close() {
|
||||||
|
server.close()
|
||||||
|
console.log(`Locator - Terminated HTTP server`)
|
||||||
|
if (plugin.settings.httpApiEnabled && plugin.settings.httpApiNotice) {
|
||||||
|
new Notice(`Locator - Terminated HTTP server`)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default getServer
|
||||||
|
export type StaticServer = ReturnType<typeof getServer>
|
108
src/tools/api.ts
Normal file
108
src/tools/api.ts
Normal file
|
@ -0,0 +1,108 @@
|
||||||
|
import type { ResultNote } from '../globals'
|
||||||
|
import { Query } from '../search/query'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import { LocatorVaultModal } from '../components/modals'
|
||||||
|
|
||||||
|
type ResultNoteApi = {
|
||||||
|
score: number
|
||||||
|
vault: string
|
||||||
|
path: string
|
||||||
|
basename: string
|
||||||
|
foundWords: string[]
|
||||||
|
matches: SearchMatchApi[]
|
||||||
|
excerpt: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export type SearchMatchApi = {
|
||||||
|
match: string
|
||||||
|
offset: number
|
||||||
|
}
|
||||||
|
|
||||||
|
let notified = false
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Callbacks to be called when the search index is ready
|
||||||
|
*/
|
||||||
|
let onIndexedCallbacks: Array<() => void> = []
|
||||||
|
|
||||||
|
function mapResults(
|
||||||
|
plugin: LocatorPlugin,
|
||||||
|
results: ResultNote[]
|
||||||
|
): ResultNoteApi[] {
|
||||||
|
return results.map(result => {
|
||||||
|
const { score, path, basename, foundWords, matches, content } = result
|
||||||
|
|
||||||
|
const excerpt = plugin.textProcessor.makeExcerpt(
|
||||||
|
content,
|
||||||
|
matches[0]?.offset ?? -1
|
||||||
|
)
|
||||||
|
|
||||||
|
const res: ResultNoteApi = {
|
||||||
|
score,
|
||||||
|
vault: plugin.app.vault.getName(),
|
||||||
|
path,
|
||||||
|
basename,
|
||||||
|
foundWords,
|
||||||
|
matches: matches.map(match => {
|
||||||
|
return {
|
||||||
|
match: match.match,
|
||||||
|
offset: match.offset,
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
excerpt: excerpt,
|
||||||
|
}
|
||||||
|
|
||||||
|
return res
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export function notifyOnIndexed(): void {
|
||||||
|
notified = true
|
||||||
|
onIndexedCallbacks.forEach(cb => cb())
|
||||||
|
}
|
||||||
|
|
||||||
|
let registed = false
|
||||||
|
|
||||||
|
export function registerAPI(plugin: LocatorPlugin): void {
|
||||||
|
if (registed) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
registed = true
|
||||||
|
|
||||||
|
// Url scheme for obsidian://locator?query=foobar
|
||||||
|
plugin.registerObsidianProtocolHandler('locator', params => {
|
||||||
|
new LocatorVaultModal(plugin, params.query).open()
|
||||||
|
})
|
||||||
|
|
||||||
|
const api = getApi(plugin)
|
||||||
|
|
||||||
|
// Public api
|
||||||
|
// @ts-ignore
|
||||||
|
globalThis['locator'] = api
|
||||||
|
// Deprecated
|
||||||
|
;(plugin.app as any).plugins.plugins.locator.api = api
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getApi(plugin: LocatorPlugin) {
|
||||||
|
return {
|
||||||
|
async search(q: string): Promise<ResultNoteApi[]> {
|
||||||
|
const query = new Query(q, {
|
||||||
|
ignoreDiacritics: plugin.settings.ignoreDiacritics,
|
||||||
|
ignoreArabicDiacritics: plugin.settings.ignoreArabicDiacritics,
|
||||||
|
})
|
||||||
|
const raw = await plugin.searchEngine.getSuggestions(query)
|
||||||
|
return mapResults(plugin, raw)
|
||||||
|
},
|
||||||
|
registerOnIndexed(cb: () => void): void {
|
||||||
|
onIndexedCallbacks.push(cb)
|
||||||
|
// Immediately call the callback if the indexing is already ready done
|
||||||
|
if (notified) {
|
||||||
|
cb()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
unregisterOnIndexed(cb: () => void): void {
|
||||||
|
onIndexedCallbacks = onIndexedCallbacks.filter(o => o !== cb)
|
||||||
|
},
|
||||||
|
refreshIndex: plugin.notesIndexer.refreshIndex,
|
||||||
|
}
|
||||||
|
}
|
66
src/tools/event-bus.ts
Normal file
66
src/tools/event-bus.ts
Normal file
|
@ -0,0 +1,66 @@
|
||||||
|
export type EventBusCallback = (...args: any[]) => any
|
||||||
|
|
||||||
|
export class EventBus {
|
||||||
|
private handlers: Map<string, EventBusCallback> = new Map()
|
||||||
|
private disabled: string[] = []
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adds a subscription for `event`, for the specified `context`.
|
||||||
|
* If a subscription for the same event in the same context already exists, this will overwrite it.
|
||||||
|
* @param context
|
||||||
|
* @param event
|
||||||
|
* @param callback
|
||||||
|
*/
|
||||||
|
public on(context: string, event: string, callback: EventBusCallback): void {
|
||||||
|
if (context.includes('@') || event.includes('@')) {
|
||||||
|
throw new Error('Invalid context/event name - Cannot contain @')
|
||||||
|
}
|
||||||
|
this.handlers.set(`${context}@${event}`, callback)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes the subscription for an `event` in the `context`.
|
||||||
|
* If `event` is left empty, removes all subscriptions.
|
||||||
|
* @param context
|
||||||
|
* @param event
|
||||||
|
*/
|
||||||
|
public off(context: string, event?: string): void {
|
||||||
|
if (event) {
|
||||||
|
this.handlers.delete(`${context}@${event}`)
|
||||||
|
} else {
|
||||||
|
for (const [key] of this.handlers.entries()) {
|
||||||
|
if (key.startsWith(`${context}@`)) {
|
||||||
|
this.handlers.delete(key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Disables a `context`. Does not remove subscriptions, but all events for related listeners will be ignored.
|
||||||
|
* @param context
|
||||||
|
*/
|
||||||
|
public disable(context: string): void {
|
||||||
|
this.enable(context)
|
||||||
|
this.disabled.push(context)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Re-enables a `context`.
|
||||||
|
* @param context
|
||||||
|
*/
|
||||||
|
public enable(context: string): void {
|
||||||
|
this.disabled = this.disabled.filter(v => v !== context)
|
||||||
|
}
|
||||||
|
|
||||||
|
public emit(event: string, ...args: any[]): void {
|
||||||
|
const entries = [...this.handlers.entries()].filter(
|
||||||
|
([k, _]) => !this.disabled.includes(k.split('@')[0])
|
||||||
|
)
|
||||||
|
for (const [key, handler] of entries) {
|
||||||
|
if (key.endsWith(`@${event}`)) {
|
||||||
|
handler(...args)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
190
src/tools/icon-utils.ts
Normal file
190
src/tools/icon-utils.ts
Normal file
|
@ -0,0 +1,190 @@
|
||||||
|
import { getIcon, normalizePath } from 'obsidian'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
import {
|
||||||
|
isFileImage,
|
||||||
|
isFilePDF,
|
||||||
|
isFileCanvas,
|
||||||
|
isFileExcalidraw,
|
||||||
|
warnVerbose,
|
||||||
|
} from './utils'
|
||||||
|
import { escapeHTML } from './text-processing'
|
||||||
|
|
||||||
|
export interface IconPacks {
|
||||||
|
prefixToIconPack: { [prefix: string]: string }
|
||||||
|
iconsPath: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function loadIconData(plugin: LocatorPlugin): Promise<any> {
|
||||||
|
const app = plugin.app
|
||||||
|
|
||||||
|
// Check if the 'obsidian-icon-folder' plugin is installed and enabled
|
||||||
|
// Casting 'app' to 'any' here to avoid TypeScript errors since 'plugins' might not be defined on 'App'
|
||||||
|
const iconFolderPlugin = (app as any).plugins.getPlugin(
|
||||||
|
'obsidian-icon-folder'
|
||||||
|
)
|
||||||
|
if (!iconFolderPlugin) {
|
||||||
|
return {}
|
||||||
|
}
|
||||||
|
|
||||||
|
const dataJsonPath = `${app.vault.configDir}/plugins/obsidian-icon-folder/data.json`
|
||||||
|
try {
|
||||||
|
const dataJsonContent = await app.vault.adapter.read(dataJsonPath)
|
||||||
|
const rawIconData = JSON.parse(dataJsonContent)
|
||||||
|
// Normalize keys
|
||||||
|
const iconData: any = {}
|
||||||
|
for (const key in rawIconData) {
|
||||||
|
const normalizedKey = normalizePath(key)
|
||||||
|
iconData[normalizedKey] = rawIconData[key]
|
||||||
|
}
|
||||||
|
return iconData
|
||||||
|
} catch (e) {
|
||||||
|
warnVerbose('Failed to read data.json:', e)
|
||||||
|
return {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function initializeIconPacks(
|
||||||
|
plugin: LocatorPlugin
|
||||||
|
): Promise<IconPacks> {
|
||||||
|
// Add 'Li' prefix for Lucide icons
|
||||||
|
const prefixToIconPack: { [prefix: string]: string } = { Li: 'lucide-icons' }
|
||||||
|
let iconsPath = 'icons'
|
||||||
|
|
||||||
|
const app = plugin.app
|
||||||
|
|
||||||
|
// Access the obsidian-icon-folder plugin
|
||||||
|
const iconFolderPlugin = (app as any).plugins.getPlugin(
|
||||||
|
'obsidian-icon-folder'
|
||||||
|
)
|
||||||
|
|
||||||
|
if (iconFolderPlugin) {
|
||||||
|
// Get the icons path from the plugin's settings
|
||||||
|
const iconFolderSettings = iconFolderPlugin.settings
|
||||||
|
iconsPath = iconFolderSettings?.iconPacksPath || 'icons'
|
||||||
|
const iconsDir = `${app.vault.configDir}/${iconsPath}`
|
||||||
|
|
||||||
|
try {
|
||||||
|
const iconPackDirs = await app.vault.adapter.list(iconsDir)
|
||||||
|
if (iconPackDirs.folders && iconPackDirs.folders.length > 0) {
|
||||||
|
for (const folderPath of iconPackDirs.folders) {
|
||||||
|
const pathParts = folderPath.split('/')
|
||||||
|
const iconPackName = pathParts[pathParts.length - 1]
|
||||||
|
const prefix = createIconPackPrefix(iconPackName)
|
||||||
|
prefixToIconPack[prefix] = iconPackName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
warnVerbose('Failed to list icon packs:', e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { prefixToIconPack, iconsPath }
|
||||||
|
}
|
||||||
|
|
||||||
|
function createIconPackPrefix(iconPackName: string): string {
|
||||||
|
if (iconPackName.includes('-')) {
|
||||||
|
const splitted = iconPackName.split('-')
|
||||||
|
let result = splitted[0].charAt(0).toUpperCase()
|
||||||
|
for (let i = 1; i < splitted.length; i++) {
|
||||||
|
result += splitted[i].charAt(0).toLowerCase()
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
return (
|
||||||
|
iconPackName.charAt(0).toUpperCase() + iconPackName.charAt(1).toLowerCase()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getIconNameForPath(path: string, iconData: any): string | null {
|
||||||
|
const normalizedPath = normalizePath(path)
|
||||||
|
const iconEntry = iconData[normalizedPath]
|
||||||
|
if (iconEntry) {
|
||||||
|
if (typeof iconEntry === 'string') {
|
||||||
|
return iconEntry
|
||||||
|
} else if (typeof iconEntry === 'object' && iconEntry.iconName) {
|
||||||
|
return iconEntry.iconName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseIconName(iconName: string): {
|
||||||
|
prefix: string
|
||||||
|
name: string
|
||||||
|
} {
|
||||||
|
const prefixMatch = iconName.match(/^[A-Z][a-z]*/)
|
||||||
|
if (prefixMatch) {
|
||||||
|
const prefix = prefixMatch[0]
|
||||||
|
const name = iconName.substring(prefix.length)
|
||||||
|
return { prefix, name }
|
||||||
|
} else {
|
||||||
|
// No prefix, treat the entire iconName as the name
|
||||||
|
return { prefix: '', name: iconName }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function loadIconSVG(
|
||||||
|
iconName: string,
|
||||||
|
plugin: LocatorPlugin,
|
||||||
|
iconsPath: string,
|
||||||
|
prefixToIconPack: { [prefix: string]: string }
|
||||||
|
): Promise<string | null> {
|
||||||
|
const parsed = parseIconName(iconName)
|
||||||
|
const { prefix, name } = parsed
|
||||||
|
|
||||||
|
if (!prefix) {
|
||||||
|
// No prefix, assume it's an emoji or text
|
||||||
|
return `<span class="locator-result__icon--emoji">${escapeHTML(
|
||||||
|
name
|
||||||
|
)}</span>`
|
||||||
|
}
|
||||||
|
|
||||||
|
const iconPackName = prefixToIconPack[prefix]
|
||||||
|
|
||||||
|
if (!iconPackName) {
|
||||||
|
warnVerbose(`No icon pack found for prefix: ${prefix}`)
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (iconPackName === 'lucide-icons') {
|
||||||
|
// Convert CamelCase to dash-case for Lucide icons
|
||||||
|
const dashedName = name.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase()
|
||||||
|
const iconEl = getIcon(dashedName)
|
||||||
|
if (iconEl) {
|
||||||
|
return iconEl.outerHTML
|
||||||
|
} else {
|
||||||
|
warnVerbose(`Lucide icon not found: ${dashedName}`)
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!iconsPath) {
|
||||||
|
warnVerbose('Icons path is not set. Cannot load icon SVG.')
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
const iconPath = `${plugin.app.vault.configDir}/${iconsPath}/${iconPackName}/${name}.svg`
|
||||||
|
try {
|
||||||
|
const svgContent = await plugin.app.vault.adapter.read(iconPath)
|
||||||
|
return svgContent
|
||||||
|
} catch (e) {
|
||||||
|
warnVerbose(`Failed to load icon SVG for ${iconName} at ${iconPath}:`, e)
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getDefaultIconSVG(
|
||||||
|
notePath: string,
|
||||||
|
plugin: LocatorPlugin
|
||||||
|
): string {
|
||||||
|
// Return SVG content for default icons based on file type
|
||||||
|
let iconName = 'file'
|
||||||
|
if (isFileImage(notePath)) {
|
||||||
|
iconName = 'image'
|
||||||
|
} else if (isFilePDF(notePath)) {
|
||||||
|
iconName = 'file-text'
|
||||||
|
} else if (isFileCanvas(notePath) || isFileExcalidraw(notePath)) {
|
||||||
|
iconName = 'layout-dashboard'
|
||||||
|
}
|
||||||
|
const iconEl = getIcon(iconName)
|
||||||
|
return iconEl ? iconEl.outerHTML : ''
|
||||||
|
}
|
103
src/tools/notes.ts
Normal file
103
src/tools/notes.ts
Normal file
|
@ -0,0 +1,103 @@
|
||||||
|
import { type App, type CachedMetadata, MarkdownView, TFile } from 'obsidian'
|
||||||
|
import type { ResultNote } from '../globals'
|
||||||
|
|
||||||
|
export async function openNote(
|
||||||
|
app: App,
|
||||||
|
item: ResultNote,
|
||||||
|
offset = 0,
|
||||||
|
newPane = false,
|
||||||
|
newLeaf = false
|
||||||
|
): Promise<void> {
|
||||||
|
// Check if the note is already open,
|
||||||
|
// to avoid opening it twice if the first one is pinned
|
||||||
|
let alreadyOpenAndPinned = false
|
||||||
|
app.workspace.iterateAllLeaves(leaf => {
|
||||||
|
if (leaf.view instanceof MarkdownView) {
|
||||||
|
if (
|
||||||
|
!newPane &&
|
||||||
|
leaf.getViewState().state?.file === item.path &&
|
||||||
|
leaf.getViewState()?.pinned
|
||||||
|
) {
|
||||||
|
app.workspace.setActiveLeaf(leaf, { focus: true })
|
||||||
|
alreadyOpenAndPinned = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!alreadyOpenAndPinned) {
|
||||||
|
// Open the note normally
|
||||||
|
await app.workspace.openLinkText(item.path, '', newLeaf ? 'split' : newPane)
|
||||||
|
}
|
||||||
|
|
||||||
|
const view = app.workspace.getActiveViewOfType(MarkdownView)
|
||||||
|
if (!view) {
|
||||||
|
// Not an editable document, so no cursor to place
|
||||||
|
// throw new Error('OmniSearch - No active MarkdownView')
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const pos = view.editor.offsetToPos(offset)
|
||||||
|
// pos.ch = 0
|
||||||
|
|
||||||
|
view.editor.setCursor(pos)
|
||||||
|
view.editor.scrollIntoView({
|
||||||
|
from: { line: pos.line - 10, ch: 0 },
|
||||||
|
to: { line: pos.line + 10, ch: 0 },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createNote(
|
||||||
|
app: App,
|
||||||
|
name: string,
|
||||||
|
newLeaf = false
|
||||||
|
): Promise<void> {
|
||||||
|
try {
|
||||||
|
let pathPrefix: string
|
||||||
|
switch (app.vault.getConfig('newFileLocation')) {
|
||||||
|
case 'current':
|
||||||
|
pathPrefix = (app.workspace.getActiveFile()?.parent?.path ?? '') + '/'
|
||||||
|
break
|
||||||
|
case 'folder':
|
||||||
|
pathPrefix = app.vault.getConfig('newFileFolderPath') + '/'
|
||||||
|
break
|
||||||
|
default: // 'root'
|
||||||
|
pathPrefix = ''
|
||||||
|
break
|
||||||
|
}
|
||||||
|
await app.workspace.openLinkText(`${pathPrefix}${name}.md`, '', newLeaf)
|
||||||
|
} catch (e) {
|
||||||
|
;(e as any).message =
|
||||||
|
'OmniSearch - Could not create note: ' + (e as any).message
|
||||||
|
console.error(e)
|
||||||
|
throw e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* For a given file, returns a list of links leading to notes that don't exist
|
||||||
|
* @param file
|
||||||
|
* @param metadata
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
export function getNonExistingNotes(
|
||||||
|
app: App,
|
||||||
|
file: TFile,
|
||||||
|
metadata: CachedMetadata
|
||||||
|
): string[] {
|
||||||
|
return (metadata.links ?? [])
|
||||||
|
.map(l => {
|
||||||
|
const path = removeAnchors(l.link)
|
||||||
|
return app.metadataCache.getFirstLinkpathDest(path, file.path)
|
||||||
|
? ''
|
||||||
|
: l.link
|
||||||
|
})
|
||||||
|
.filter(l => !!l)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes anchors and headings
|
||||||
|
* @param name
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
export function removeAnchors(name: string): string {
|
||||||
|
return name.split(/[\^#]+/)[0]
|
||||||
|
}
|
164
src/tools/text-processing.ts
Normal file
164
src/tools/text-processing.ts
Normal file
|
@ -0,0 +1,164 @@
|
||||||
|
import { excerptAfter, excerptBefore, type SearchMatch } from '../globals'
|
||||||
|
import { removeDiacritics, warnVerbose } from './utils'
|
||||||
|
import type { Query } from '../search/query'
|
||||||
|
import { Notice } from 'obsidian'
|
||||||
|
import { escapeRegExp } from 'lodash-es'
|
||||||
|
import type LocatorPlugin from '../main'
|
||||||
|
|
||||||
|
export class TextProcessor {
|
||||||
|
constructor(private plugin: LocatorPlugin) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps the matches in the text with a <span> element and a highlight class
|
||||||
|
* @param text
|
||||||
|
* @param matches
|
||||||
|
* @returns The html string with the matches highlighted
|
||||||
|
*/
|
||||||
|
public highlightText(text: string, matches: SearchMatch[]): string {
|
||||||
|
const highlightClass = `suggestion-highlight locator-highlight ${
|
||||||
|
this.plugin.settings.highlight ? 'locator-default-highlight' : ''
|
||||||
|
}`
|
||||||
|
|
||||||
|
if (!matches.length) {
|
||||||
|
return text
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
return text.replace(
|
||||||
|
new RegExp(
|
||||||
|
`(${matches.map(item => escapeRegExp(item.match)).join('|')})`,
|
||||||
|
'giu'
|
||||||
|
),
|
||||||
|
`<span class="${highlightClass}">$1</span>`
|
||||||
|
)
|
||||||
|
} catch (e) {
|
||||||
|
console.error('Locator - Error in highlightText()', e)
|
||||||
|
return text
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts a list of strings to a list of words, using the \b word boundary.
|
||||||
|
* Used to find excerpts in a note body, or select which words to highlight.
|
||||||
|
*/
|
||||||
|
public stringsToRegex(strings: string[]): RegExp {
|
||||||
|
if (!strings.length) return /^$/g
|
||||||
|
|
||||||
|
// sort strings by decreasing length, so that longer strings are matched first
|
||||||
|
strings.sort((a, b) => b.length - a.length)
|
||||||
|
|
||||||
|
const joined = `(${strings
|
||||||
|
.map(s => `\\b${escapeRegExp(s)}\\b|${escapeRegExp(s)}`)
|
||||||
|
.join('|')})`
|
||||||
|
|
||||||
|
return new RegExp(`${joined}`, 'gui')
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an array of matches in the text, using the provided regex
|
||||||
|
* @param text
|
||||||
|
* @param reg
|
||||||
|
* @param query
|
||||||
|
*/
|
||||||
|
public getMatches(
|
||||||
|
text: string,
|
||||||
|
words: string[],
|
||||||
|
query?: Query
|
||||||
|
): SearchMatch[] {
|
||||||
|
words = words.map(escapeHTML)
|
||||||
|
const reg = this.stringsToRegex(words)
|
||||||
|
const originalText = text
|
||||||
|
// text = text.toLowerCase().replace(new RegExp(SEPARATORS, 'gu'), ' ')
|
||||||
|
if (this.plugin.settings.ignoreDiacritics) {
|
||||||
|
text = removeDiacritics(text, this.plugin.settings.ignoreArabicDiacritics)
|
||||||
|
}
|
||||||
|
const startTime = new Date().getTime()
|
||||||
|
let match: RegExpExecArray | null = null
|
||||||
|
let matches: SearchMatch[] = []
|
||||||
|
let count = 0
|
||||||
|
while ((match = reg.exec(text)) !== null) {
|
||||||
|
// Avoid infinite loops, stop looking after 100 matches or if we're taking too much time
|
||||||
|
if (++count >= 100 || new Date().getTime() - startTime > 50) {
|
||||||
|
warnVerbose('Stopped getMatches at', count, 'results')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
const matchStartIndex = match.index
|
||||||
|
const matchEndIndex = matchStartIndex + match[0].length
|
||||||
|
const originalMatch = originalText
|
||||||
|
.substring(matchStartIndex, matchEndIndex)
|
||||||
|
.trim()
|
||||||
|
if (originalMatch && match.index >= 0) {
|
||||||
|
matches.push({ match: originalMatch, offset: match.index })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the query is more than 1 token and can be found "as is" in the text, put this match first
|
||||||
|
if (
|
||||||
|
query &&
|
||||||
|
(query.query.text.length > 1 || query.getExactTerms().length > 0)
|
||||||
|
) {
|
||||||
|
const best = text.indexOf(query.getBestStringForExcerpt())
|
||||||
|
if (best > -1 && matches.find(m => m.offset === best)) {
|
||||||
|
matches.unshift({
|
||||||
|
offset: best,
|
||||||
|
match: query.getBestStringForExcerpt(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return matches
|
||||||
|
}
|
||||||
|
|
||||||
|
public makeExcerpt(content: string, offset: number): string {
|
||||||
|
const settings = this.plugin.settings
|
||||||
|
try {
|
||||||
|
const pos = offset ?? -1
|
||||||
|
const from = Math.max(0, pos - excerptBefore)
|
||||||
|
const to = Math.min(content.length, pos + excerptAfter)
|
||||||
|
if (pos > -1) {
|
||||||
|
content =
|
||||||
|
(from > 0 ? '…' : '') +
|
||||||
|
content.slice(from, to).trim() +
|
||||||
|
(to < content.length - 1 ? '…' : '')
|
||||||
|
} else {
|
||||||
|
content = content.slice(0, excerptAfter)
|
||||||
|
}
|
||||||
|
if (settings.renderLineReturnInExcerpts) {
|
||||||
|
const lineReturn = new RegExp(/(?:\r\n|\r|\n)/g)
|
||||||
|
// Remove multiple line returns
|
||||||
|
content = content
|
||||||
|
.split(lineReturn)
|
||||||
|
.filter(l => l)
|
||||||
|
.join('\n')
|
||||||
|
|
||||||
|
const last = content.lastIndexOf('\n', pos - from)
|
||||||
|
|
||||||
|
if (last > 0) {
|
||||||
|
content = content.slice(last)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
content = escapeHTML(content)
|
||||||
|
|
||||||
|
if (settings.renderLineReturnInExcerpts) {
|
||||||
|
content = content.trim().replaceAll('\n', '<br>')
|
||||||
|
}
|
||||||
|
|
||||||
|
return content
|
||||||
|
} catch (e) {
|
||||||
|
new Notice(
|
||||||
|
'Locator - Error while creating excerpt, see developer console'
|
||||||
|
)
|
||||||
|
console.error(`Locator - Error while creating excerpt`)
|
||||||
|
console.error(e)
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function escapeHTML(html: string): string {
|
||||||
|
return html
|
||||||
|
.replaceAll('&', '&')
|
||||||
|
.replaceAll('<', '<')
|
||||||
|
.replaceAll('>', '>')
|
||||||
|
.replaceAll('"', '"')
|
||||||
|
.replaceAll("'", ''')
|
||||||
|
}
|
281
src/tools/utils.ts
Normal file
281
src/tools/utils.ts
Normal file
|
@ -0,0 +1,281 @@
|
||||||
|
import {
|
||||||
|
type CachedMetadata,
|
||||||
|
getAllTags,
|
||||||
|
Notice,
|
||||||
|
parseFrontMatterAliases,
|
||||||
|
Platform,
|
||||||
|
} from 'obsidian'
|
||||||
|
import { isSearchMatch, type SearchMatch } from '../globals'
|
||||||
|
import { type BinaryLike, createHash } from 'crypto'
|
||||||
|
import { md5 } from 'pure-md5'
|
||||||
|
|
||||||
|
export function pathWithoutFilename(path: string): string {
|
||||||
|
const split = path.split('/')
|
||||||
|
split.pop()
|
||||||
|
return split.join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
export function wait(ms: number): Promise<void> {
|
||||||
|
return new Promise(resolve => {
|
||||||
|
setTimeout(resolve, ms)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the positions of all occurences of `val` inside of `text`
|
||||||
|
* https://stackoverflow.com/a/58828841
|
||||||
|
* @param text
|
||||||
|
* @param regex
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
export function getAllIndices(text: string, regex: RegExp): SearchMatch[] {
|
||||||
|
return [...text.matchAll(regex)]
|
||||||
|
.map(o => ({ match: o[0], offset: o.index }))
|
||||||
|
.filter(isSearchMatch)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function extractHeadingsFromCache(
|
||||||
|
cache: CachedMetadata,
|
||||||
|
level: number
|
||||||
|
): string[] {
|
||||||
|
return (
|
||||||
|
cache.headings?.filter(h => h.level === level).map(h => h.heading) ?? []
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function loopIndex(index: number, nbItems: number): number {
|
||||||
|
return (index + nbItems) % nbItems
|
||||||
|
}
|
||||||
|
|
||||||
|
function mapAsync<T, U>(
|
||||||
|
array: T[],
|
||||||
|
callbackfn: (value: T, index: number, array: T[]) => Promise<U>
|
||||||
|
): Promise<U[]> {
|
||||||
|
return Promise.all(array.map(callbackfn))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* https://stackoverflow.com/a/53508547
|
||||||
|
* @param array
|
||||||
|
* @param callbackfn
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
export async function filterAsync<T>(
|
||||||
|
array: T[],
|
||||||
|
callbackfn: (value: T, index: number, array: T[]) => Promise<boolean>
|
||||||
|
): Promise<T[]> {
|
||||||
|
const filterMap = await mapAsync(array, callbackfn)
|
||||||
|
return array.filter((_value, index) => filterMap[index])
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A simple function to strip bold and italic markdown chars from a string
|
||||||
|
* @param text
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
export function stripMarkdownCharacters(text: string): string {
|
||||||
|
return text.replace(/(\*|_)+(.+?)(\*|_)+/g, (_match, _p1, p2) => p2)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getAliasesFromMetadata(
|
||||||
|
metadata: CachedMetadata | null
|
||||||
|
): string[] {
|
||||||
|
return metadata?.frontmatter
|
||||||
|
? parseFrontMatterAliases(metadata.frontmatter) ?? []
|
||||||
|
: []
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getTagsFromMetadata(metadata: CachedMetadata | null): string[] {
|
||||||
|
let tags = metadata ? getAllTags(metadata) ?? [] : []
|
||||||
|
// This will "un-nest" tags that are in the form of "#tag/subtag"
|
||||||
|
// A tag like "#tag/subtag" will be split into 3 tags: '#tag/subtag", "#tag" and "#subtag"
|
||||||
|
// https://github.com/scambier/obsidian-locator/issues/146
|
||||||
|
tags = [
|
||||||
|
...new Set(
|
||||||
|
tags.reduce((acc, tag) => {
|
||||||
|
return [
|
||||||
|
...acc,
|
||||||
|
...tag
|
||||||
|
.split('/')
|
||||||
|
.filter(t => t)
|
||||||
|
.map(t => (t.startsWith('#') ? t : `#${t}`)),
|
||||||
|
tag,
|
||||||
|
]
|
||||||
|
}, [] as string[])
|
||||||
|
),
|
||||||
|
]
|
||||||
|
return tags
|
||||||
|
}
|
||||||
|
|
||||||
|
// Define cached diacritics regex once outside the function
|
||||||
|
const japaneseDiacritics = ['\\u30FC', '\\u309A', '\\u3099']
|
||||||
|
const regexpExclude = japaneseDiacritics.join('|')
|
||||||
|
const diacriticsRegex = new RegExp(`(?!${regexpExclude})\\p{Diacritic}`, 'gu')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* https://stackoverflow.com/a/37511463
|
||||||
|
*/
|
||||||
|
export function removeDiacritics(str: string, arabic = false): string {
|
||||||
|
if (str === null || str === undefined) {
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
|
||||||
|
if (arabic) {
|
||||||
|
// Arabic diacritics
|
||||||
|
// https://stackoverflow.com/a/40959537
|
||||||
|
str = str
|
||||||
|
.replace(/([^\u0621-\u063A\u0641-\u064A\u0660-\u0669a-zA-Z 0-9])/g, '')
|
||||||
|
.replace(/(آ|إ|أ)/g, 'ا')
|
||||||
|
.replace(/(ة)/g, 'ه')
|
||||||
|
.replace(/(ئ|ؤ)/g, 'ء')
|
||||||
|
.replace(/(ى)/g, 'ي')
|
||||||
|
for (let i = 0; i < 10; i++) {
|
||||||
|
str.replace(String.fromCharCode(0x660 + i), String.fromCharCode(48 + i))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Keep backticks for code blocks, because otherwise they are removed by the .normalize() function
|
||||||
|
// https://stackoverflow.com/a/36100275
|
||||||
|
str = str.replaceAll('`', '[__locator__backtick__]')
|
||||||
|
// Keep caret same as above
|
||||||
|
str = str.replaceAll('^', '[__locator__caret__]')
|
||||||
|
// To keep right form of Korean character, NFC normalization is necessary
|
||||||
|
str = str.normalize('NFD').replace(diacriticsRegex, '').normalize('NFC')
|
||||||
|
str = str.replaceAll('[__locator__backtick__]', '`')
|
||||||
|
str = str.replaceAll('[__locator__caret__]', '^')
|
||||||
|
return str
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getCtrlKeyLabel(): 'Ctrl' | '⌘' {
|
||||||
|
return Platform.isMacOS ? '⌘' : 'Ctrl'
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getAltKeyLabel(): 'Alt' | '⌥' {
|
||||||
|
return Platform.isMacOS ? '⌥' : 'Alt'
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isFileImage(path: string): boolean {
|
||||||
|
const ext = getExtension(path)
|
||||||
|
return (
|
||||||
|
ext === 'png' ||
|
||||||
|
ext === 'jpg' ||
|
||||||
|
ext === 'jpeg' ||
|
||||||
|
ext === 'webp' ||
|
||||||
|
ext === 'gif'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isFilePDF(path: string): boolean {
|
||||||
|
return getExtension(path) === 'pdf'
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isFileOffice(path: string): boolean {
|
||||||
|
const ext = getExtension(path)
|
||||||
|
return ext === 'docx' || ext === 'xlsx'
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isFileCanvas(path: string): boolean {
|
||||||
|
return path.endsWith('.canvas')
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isFileExcalidraw(path: string): boolean {
|
||||||
|
return path.endsWith('.excalidraw')
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isFileFromDataloom(path: string): boolean {
|
||||||
|
return path.endsWith('.loom')
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getExtension(path: string): string {
|
||||||
|
const split = path.split('.')
|
||||||
|
return split[split.length - 1] ?? ''
|
||||||
|
}
|
||||||
|
|
||||||
|
export function makeMD5(data: BinaryLike): string {
|
||||||
|
if (Platform.isMobileApp) {
|
||||||
|
// A node-less implementation, but since we're not hashing the same data
|
||||||
|
// (arrayBuffer vs stringified array) the hash will be different
|
||||||
|
return md5(data.toString())
|
||||||
|
}
|
||||||
|
return createHash('md5').update(data).digest('hex')
|
||||||
|
}
|
||||||
|
|
||||||
|
export function chunkArray<T>(arr: T[], len: number): T[][] {
|
||||||
|
const chunks = []
|
||||||
|
let i = 0
|
||||||
|
const n = arr.length
|
||||||
|
|
||||||
|
while (i < n) {
|
||||||
|
chunks.push(arr.slice(i, (i += len)))
|
||||||
|
}
|
||||||
|
|
||||||
|
return chunks
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts a 'fooBarBAZLorem' into ['foo', 'Bar', 'BAZ', 'Lorem']
|
||||||
|
* If the string isn't camelCase, returns an empty array
|
||||||
|
* @param text
|
||||||
|
*/
|
||||||
|
export function splitCamelCase(text: string): string[] {
|
||||||
|
// if no camel case found, do nothing
|
||||||
|
if (!/[a-z][A-Z]/.test(text)) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const splittedText = text
|
||||||
|
.replace(/([a-z](?=[A-Z]))/g, '$1 ')
|
||||||
|
.split(' ')
|
||||||
|
.filter(t => t)
|
||||||
|
return splittedText
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts a 'foo-bar-baz' into ['foo', 'bar', 'baz']
|
||||||
|
* If the string isn't hyphenated, returns an empty array
|
||||||
|
* @param text
|
||||||
|
*/
|
||||||
|
export function splitHyphens(text: string): string[] {
|
||||||
|
if (!text.includes('-')) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
return text.split('-').filter(t => t)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function logVerbose(...args: any[]): void {
|
||||||
|
printVerbose(console.debug, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function warnVerbose(...args: any[]): void {
|
||||||
|
printVerbose(console.warn, ...args)
|
||||||
|
}
|
||||||
|
|
||||||
|
let verboseLoggingEnabled = false
|
||||||
|
export function enableVerboseLogging(enable: boolean): void {
|
||||||
|
verboseLoggingEnabled = enable
|
||||||
|
}
|
||||||
|
|
||||||
|
function printVerbose(fn: (...args: any[]) => any, ...args: any[]): void {
|
||||||
|
if (verboseLoggingEnabled) {
|
||||||
|
fn(...args)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const countError = (() => {
|
||||||
|
let counter = 0
|
||||||
|
let alreadyWarned = false
|
||||||
|
setTimeout(() => {
|
||||||
|
if (counter > 0) {
|
||||||
|
--counter
|
||||||
|
}
|
||||||
|
}, 1000)
|
||||||
|
return (immediate = false) => {
|
||||||
|
// 3 errors in 1 second, there's probably something wrong
|
||||||
|
if ((++counter >= 5 || immediate) && !alreadyWarned) {
|
||||||
|
alreadyWarned = true
|
||||||
|
new Notice(
|
||||||
|
'Locator ⚠️ There might be an issue with your cache. You should clean it in Locator settings and restart Obsidian.',
|
||||||
|
5000
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})()
|
23
src/typings/types-obsidian.d.ts
vendored
Normal file
23
src/typings/types-obsidian.d.ts
vendored
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
import type { MetadataCache, ViewState, Vault } from 'obsidian'
|
||||||
|
|
||||||
|
declare module 'obsidian' {
|
||||||
|
interface MetadataCache {
|
||||||
|
isUserIgnored?(path: string): boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ViewState {
|
||||||
|
state?: {
|
||||||
|
file?: string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Vault {
|
||||||
|
getConfig(string): unknown
|
||||||
|
}
|
||||||
|
|
||||||
|
interface App {
|
||||||
|
appId: string
|
||||||
|
loadLocalStorage(key: string): string | null
|
||||||
|
saveLocalStorage(key: string, value?: string): void
|
||||||
|
}
|
||||||
|
}
|
27
tsconfig.json
Normal file
27
tsconfig.json
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"verbatimModuleSyntax": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"baseUrl": ".",
|
||||||
|
"inlineSourceMap": true,
|
||||||
|
"inlineSources": true,
|
||||||
|
"module": "ESNext",
|
||||||
|
"target": "ES2021",
|
||||||
|
"allowJs": true,
|
||||||
|
"noImplicitAny": true,
|
||||||
|
"moduleResolution": "node",
|
||||||
|
"importHelpers": true,
|
||||||
|
"resolveJsonModule": true,
|
||||||
|
"allowSyntheticDefaultImports": true,
|
||||||
|
"isolatedModules": true,
|
||||||
|
"strictNullChecks": true,
|
||||||
|
"lib": [
|
||||||
|
"DOM",
|
||||||
|
"ES2021"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"include": [
|
||||||
|
"**/*.ts",
|
||||||
|
"**/*.svelte"
|
||||||
|
]
|
||||||
|
}
|
15
version-bump.mjs
Normal file
15
version-bump.mjs
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
import { readFileSync, writeFileSync } from 'fs'

// Target version is injected by `npm version` through the
// npm_package_version environment variable.
const targetVersion = process.env.npm_package_version
console.log(`Bumping version to ${targetVersion}`)

// read minAppVersion from manifest.json and bump version to target version
// NOTE(review): this reads/writes manifest-beta.json, not manifest.json —
// presumably intentional for beta releases; confirm against the release flow.
const manifest = JSON.parse(readFileSync('manifest-beta.json', 'utf8'))
const { minAppVersion } = manifest
manifest.version = targetVersion
writeFileSync('manifest-beta.json', JSON.stringify(manifest, null, '\t'))

// update versions.json with target version and minAppVersion from manifest.json
const versions = JSON.parse(readFileSync('versions.json', 'utf8'))
versions[targetVersion] = minAppVersion
writeFileSync('versions.json', JSON.stringify(versions, null, '\t'))
|
161
versions.json
Normal file
161
versions.json
Normal file
|
@ -0,0 +1,161 @@
|
||||||
|
{
|
||||||
|
"0.1.0": "0.14.2",
|
||||||
|
"0.1.1": "0.14.2",
|
||||||
|
"0.1.2": "0.14.2",
|
||||||
|
"0.1.3": "0.14.2",
|
||||||
|
"0.1.4": "0.14.2",
|
||||||
|
"0.1.5": "0.14.2",
|
||||||
|
"0.1.6": "0.14.2",
|
||||||
|
"0.1.7": "0.14.2",
|
||||||
|
"0.1.8": "0.14.2",
|
||||||
|
"0.2.0": "0.14.2",
|
||||||
|
"0.2.1": "0.14.2",
|
||||||
|
"0.2.2": "0.14.2",
|
||||||
|
"0.2.3": "0.14.2",
|
||||||
|
"0.2.4": "0.14.2",
|
||||||
|
"0.2.5": "0.14.2",
|
||||||
|
"1.0.0": "0.14.2",
|
||||||
|
"1.0.1": "0.14.2",
|
||||||
|
"1.1.0": "0.14.2",
|
||||||
|
"1.1.1": "0.14.2",
|
||||||
|
"1.2.0": "0.14.2",
|
||||||
|
"1.2.1": "0.14.2",
|
||||||
|
"1.3.0-beta": "0.14.2",
|
||||||
|
"1.3.1-beta": "0.14.2",
|
||||||
|
"1.3.2-beta": "0.14.2",
|
||||||
|
"1.3.3-beta": "0.14.2",
|
||||||
|
"1.3.3": "0.14.2",
|
||||||
|
"1.3.4": "0.14.2",
|
||||||
|
"1.3.5-beta1": "0.14.2",
|
||||||
|
"1.3.5-beta2": "0.14.2",
|
||||||
|
"1.3.5-beta3": "0.14.2",
|
||||||
|
"1.4.0-beta4": "0.14.2",
|
||||||
|
"1.4.0": "0.14.2",
|
||||||
|
"1.4.1": "0.14.2",
|
||||||
|
"1.4.2": "0.14.2",
|
||||||
|
"1.4.3": "0.14.2",
|
||||||
|
"1.5.0-beta1": "0.15.6",
|
||||||
|
"1.5.0-beta2": "0.15.6",
|
||||||
|
"1.5.0-beta3": "0.15.6",
|
||||||
|
"1.5.0": "0.15.6",
|
||||||
|
"1.5.1": "0.15.6",
|
||||||
|
"1.5.2": "0.15.6",
|
||||||
|
"1.6.0-beta1": "0.15.6",
|
||||||
|
"1.6.0-beta2": "0.15.6",
|
||||||
|
"1.6.0-beta3": "0.15.6",
|
||||||
|
"1.6.0-beta4": "0.15.6",
|
||||||
|
"1.6.0": "0.15.6",
|
||||||
|
"1.6.1": "0.15.6",
|
||||||
|
"1.6.2": "0.15.6",
|
||||||
|
"1.6.3": "0.15.6",
|
||||||
|
"1.6.4": "0.15.6",
|
||||||
|
"1.6.5": "0.15.6",
|
||||||
|
"1.6.5-beta": "1.0.0",
|
||||||
|
"1.6.5-beta.2": "1.0.0",
|
||||||
|
"1.6.5-beta.3": "1.0.0",
|
||||||
|
"1.6.5-beta.4": "1.0.0",
|
||||||
|
"1.6.5-beta.5": "1.0.0",
|
||||||
|
"1.6.5-beta.6": "1.0.0",
|
||||||
|
"1.6.5-beta.7": "1.0.0",
|
||||||
|
"1.6.5-beta.8": "1.0.0",
|
||||||
|
"1.7": "1.0.0",
|
||||||
|
"1.7.1": "1.0.0",
|
||||||
|
"1.7.2": "1.0.0",
|
||||||
|
"1.7.3": "1.0.0",
|
||||||
|
"1.7.4": "1.0.0",
|
||||||
|
"1.7.5": "1.0.0",
|
||||||
|
"1.7.6": "1.0.0",
|
||||||
|
"1.7.7": "1.0.0",
|
||||||
|
"1.7.8": "1.0.0",
|
||||||
|
"1.7.9": "1.0.0",
|
||||||
|
"1.7.10": "1.0.0",
|
||||||
|
"1.8.0-beta.1": "1.0.0",
|
||||||
|
"1.8.0-beta.2": "1.0.0",
|
||||||
|
"1.8.0-beta.3": "1.0.0",
|
||||||
|
"1.8.0-beta.4": "1.0.0",
|
||||||
|
"1.8.0-beta.5": "1.0.0",
|
||||||
|
"1.8.0-beta.6": "1.0.0",
|
||||||
|
"1.8.0-beta.7": "1.0.0",
|
||||||
|
"1.8.0": "1.0.0",
|
||||||
|
"1.8.1": "1.0.0",
|
||||||
|
"1.9.0-beta.2": "1.0.0",
|
||||||
|
"1.9.0-beta.3": "1.0.0",
|
||||||
|
"1.9.0-beta.4": "1.0.0",
|
||||||
|
"1.9.0-beta.5": "1.0.0",
|
||||||
|
"1.9.0-beta.6": "1.0.0",
|
||||||
|
"1.9.0-beta.7": "1.0.0",
|
||||||
|
"1.9.0-beta.8": "1.0.0",
|
||||||
|
"1.9.0": "1.0.0",
|
||||||
|
"1.9.1": "1.0.0",
|
||||||
|
"1.10.0-beta.1": "1.0.0",
|
||||||
|
"1.10.0-beta.2": "1.0.0",
|
||||||
|
"1.10.0-beta.3": "1.0.0",
|
||||||
|
"1.10.0-beta.4": "1.0.0",
|
||||||
|
"1.10.0": "1.0.0",
|
||||||
|
"1.10.1": "1.0.0",
|
||||||
|
"1.11.0-beta.1": "1.0.0",
|
||||||
|
"1.11.0": "1.0.0",
|
||||||
|
"1.11.1": "1.0.0",
|
||||||
|
"1.12.0": "1.0.0",
|
||||||
|
"1.12.1-beta.1": "1.0.0",
|
||||||
|
"1.12.1": "1.0.0",
|
||||||
|
"1.12.2": "1.0.0",
|
||||||
|
"1.12.3": "1.0.0",
|
||||||
|
"1.13.0-beta.1": "1.0.0",
|
||||||
|
"1.13.0-beta.2": "1.0.0",
|
||||||
|
"1.13.0": "1.0.0",
|
||||||
|
"1.14.0-beta.1": "1.0.0",
|
||||||
|
"1.14.0": "1.0.0",
|
||||||
|
"1.14.1-beta.1": "1.0.0",
|
||||||
|
"1.14.1-beta.2": "1.0.0",
|
||||||
|
"1.14.1": "1.0.0",
|
||||||
|
"1.14.2": "1.0.0",
|
||||||
|
"1.15.0-beta.1": "1.0.0",
|
||||||
|
"1.15.0-beta.2": "1.0.0",
|
||||||
|
"1.15.0": "1.0.0",
|
||||||
|
"1.15.1": "1.3.0",
|
||||||
|
"1.16.0-beta.1": "1.3.0",
|
||||||
|
"1.16.0": "1.3.0",
|
||||||
|
"1.17.0": "1.3.0",
|
||||||
|
"1.17.1": "1.3.0",
|
||||||
|
"1.18.0": "1.3.0",
|
||||||
|
"1.18.1": "1.3.0",
|
||||||
|
"1.19.0-beta.1": "1.3.0",
|
||||||
|
"1.19.0": "1.3.0",
|
||||||
|
"1.20.0-beta.1": "1.3.0",
|
||||||
|
"1.20.0": "1.3.0",
|
||||||
|
"1.20.1": "1.3.0",
|
||||||
|
"1.20.2": "1.3.0",
|
||||||
|
"1.20.3": "1.3.0",
|
||||||
|
"1.20.4": "1.3.0",
|
||||||
|
"1.21.0": "1.3.0",
|
||||||
|
"1.21.1": "1.3.0",
|
||||||
|
"1.22.0-beta.1": "1.3.0",
|
||||||
|
"1.22.0-beta.2": "1.3.0",
|
||||||
|
"1.22.0-beta.3": "1.3.0",
|
||||||
|
"1.22.0": "1.3.0",
|
||||||
|
"1.22.1": "1.3.0",
|
||||||
|
"1.22.2": "1.3.0",
|
||||||
|
"1.23.0-beta.1": "1.3.0",
|
||||||
|
"1.23.0-beta.2": "1.3.0",
|
||||||
|
"1.23.0-beta.3": "1.3.0",
|
||||||
|
"1.23.0-beta.4": "1.3.0",
|
||||||
|
"1.23.0-beta.5": "1.3.0",
|
||||||
|
"1.23.0": "1.3.0",
|
||||||
|
"1.24.0-beta.1": "1.3.0",
|
||||||
|
"1.23.1": "1.3.0",
|
||||||
|
"1.24.0-beta.2": "1.3.0",
|
||||||
|
"1.24.0-beta.3": "1.3.0",
|
||||||
|
"1.24.0": "1.3.0",
|
||||||
|
"1.24.1": "1.3.0",
|
||||||
|
"1.25.0-beta.1": "1.3.0",
|
||||||
|
"1.25.0-beta.2": "1.3.0",
|
||||||
|
"1.25.0": "1.3.0",
|
||||||
|
"1.25.1": "1.3.0",
|
||||||
|
"1.25.2": "1.3.0",
|
||||||
|
"1.26.0": "1.3.0",
|
||||||
|
"1.26.1": "1.3.0",
|
||||||
|
"1.27.0-beta.1": "1.3.0",
|
||||||
|
"1.27.0": "1.3.0",
|
||||||
|
"1.27.1": "1.7.2"
|
||||||
|
}
|
Loading…
Reference in New Issue
Block a user