mirror of
https://github.com/rust-lang/rust.git
synced 2026-04-26 13:01:27 +03:00
rustdoc-search: search backend with partitioned suffix tree
This commit is contained in:
+10
@@ -4812,6 +4812,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"sha2",
|
||||
"smallvec",
|
||||
"stringdex",
|
||||
"tempfile",
|
||||
"threadpool",
|
||||
"tracing",
|
||||
@@ -5225,6 +5226,15 @@ dependencies = [
|
||||
"quote",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "stringdex"
|
||||
version = "0.0.1-alpha4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2841fd43df5b1ff1b042e167068a1fe9b163dc93041eae56ab2296859013a9a0"
|
||||
dependencies = [
|
||||
"stacker",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.11.1"
|
||||
|
||||
@@ -1 +1 @@
|
||||
8.6.0
|
||||
8.57.1
|
||||
|
||||
@@ -15,6 +15,7 @@ import os.path
|
||||
import re
|
||||
import shlex
|
||||
from collections import namedtuple
|
||||
from pathlib import Path
|
||||
|
||||
try:
|
||||
from html.parser import HTMLParser
|
||||
@@ -242,6 +243,11 @@ class CachedFiles(object):
|
||||
return self.last_path
|
||||
|
||||
def get_absolute_path(self, path):
|
||||
if "*" in path:
|
||||
paths = list(Path(self.root).glob(path))
|
||||
if len(paths) != 1:
|
||||
raise FailedCheck("glob path does not resolve to one file")
|
||||
path = str(paths[0])
|
||||
return os.path.join(self.root, path)
|
||||
|
||||
def get_file(self, path):
|
||||
|
||||
@@ -21,6 +21,7 @@ rustdoc-json-types = { path = "../rustdoc-json-types" }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
smallvec = "1.8.1"
|
||||
stringdex = { version = "0.0.1-alpha4" }
|
||||
tempfile = "3"
|
||||
threadpool = "1.8.1"
|
||||
tracing = "0.1"
|
||||
|
||||
@@ -10,6 +10,7 @@ fn main() {
|
||||
"static/css/normalize.css",
|
||||
"static/js/main.js",
|
||||
"static/js/search.js",
|
||||
"static/js/stringdex.js",
|
||||
"static/js/settings.js",
|
||||
"static/js/src-script.js",
|
||||
"static/js/storage.js",
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use std::mem;
|
||||
|
||||
use rustc_ast::join_path_syms;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
|
||||
use rustc_hir::StabilityLevel;
|
||||
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet};
|
||||
@@ -574,7 +573,6 @@ fn add_item_to_search_index(tcx: TyCtxt<'_>, cache: &mut Cache, item: &clean::It
|
||||
clean::ItemKind::ImportItem(import) => import.source.did.unwrap_or(item_def_id),
|
||||
_ => item_def_id,
|
||||
};
|
||||
let path = join_path_syms(parent_path);
|
||||
let impl_id = if let Some(ParentStackItem::Impl { item_id, .. }) = cache.parent_stack.last() {
|
||||
item_id.as_def_id()
|
||||
} else {
|
||||
@@ -593,11 +591,11 @@ fn add_item_to_search_index(tcx: TyCtxt<'_>, cache: &mut Cache, item: &clean::It
|
||||
ty: item.type_(),
|
||||
defid: Some(defid),
|
||||
name,
|
||||
path,
|
||||
module_path: parent_path.to_vec(),
|
||||
desc,
|
||||
parent: parent_did,
|
||||
parent_idx: None,
|
||||
exact_path: None,
|
||||
exact_module_path: None,
|
||||
impl_id,
|
||||
search_type,
|
||||
aliases,
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
|
||||
use rustc_hir::def::{CtorOf, DefKind, MacroKinds};
|
||||
use rustc_span::hygiene::MacroKind;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
|
||||
|
||||
use crate::clean;
|
||||
|
||||
@@ -68,6 +68,52 @@ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for ItemType {
|
||||
fn deserialize<D>(deserializer: D) -> Result<ItemType, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct ItemTypeVisitor;
|
||||
impl<'de> de::Visitor<'de> for ItemTypeVisitor {
|
||||
type Value = ItemType;
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(formatter, "an integer between 0 and 25")
|
||||
}
|
||||
fn visit_u64<E: de::Error>(self, v: u64) -> Result<ItemType, E> {
|
||||
Ok(match v {
|
||||
0 => ItemType::Keyword,
|
||||
1 => ItemType::Primitive,
|
||||
2 => ItemType::Module,
|
||||
3 => ItemType::ExternCrate,
|
||||
4 => ItemType::Import,
|
||||
5 => ItemType::Struct,
|
||||
6 => ItemType::Enum,
|
||||
7 => ItemType::Function,
|
||||
8 => ItemType::TypeAlias,
|
||||
9 => ItemType::Static,
|
||||
10 => ItemType::Trait,
|
||||
11 => ItemType::Impl,
|
||||
12 => ItemType::TyMethod,
|
||||
13 => ItemType::Method,
|
||||
14 => ItemType::StructField,
|
||||
15 => ItemType::Variant,
|
||||
16 => ItemType::Macro,
|
||||
17 => ItemType::AssocType,
|
||||
18 => ItemType::Constant,
|
||||
19 => ItemType::AssocConst,
|
||||
20 => ItemType::Union,
|
||||
21 => ItemType::ForeignType,
|
||||
23 => ItemType::ProcAttribute,
|
||||
24 => ItemType::ProcDerive,
|
||||
25 => ItemType::TraitAlias,
|
||||
_ => return Err(E::missing_field("unknown number")),
|
||||
})
|
||||
}
|
||||
}
|
||||
deserializer.deserialize_any(ItemTypeVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a clean::Item> for ItemType {
|
||||
fn from(item: &'a clean::Item) -> ItemType {
|
||||
let kind = match &item.kind {
|
||||
@@ -198,6 +244,10 @@ pub(crate) fn is_method(&self) -> bool {
|
||||
pub(crate) fn is_adt(&self) -> bool {
|
||||
matches!(self, ItemType::Struct | ItemType::Union | ItemType::Enum)
|
||||
}
|
||||
/// Keep this the same as isFnLikeTy in search.js
|
||||
pub(crate) fn is_fn_like(&self) -> bool {
|
||||
matches!(self, ItemType::Function | ItemType::Method | ItemType::TyMethod)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ItemType {
|
||||
|
||||
@@ -27,6 +27,7 @@ pub(crate) struct Layout {
|
||||
|
||||
pub(crate) struct Page<'a> {
|
||||
pub(crate) title: &'a str,
|
||||
pub(crate) short_title: &'a str,
|
||||
pub(crate) css_class: &'a str,
|
||||
pub(crate) root_path: &'a str,
|
||||
pub(crate) static_root_path: Option<&'a str>,
|
||||
|
||||
@@ -204,6 +204,18 @@ fn render_item(&mut self, it: &clean::Item, is_module: bool) -> String {
|
||||
if !is_module {
|
||||
title.push_str(it.name.unwrap().as_str());
|
||||
}
|
||||
let short_title;
|
||||
let short_title = if is_module {
|
||||
let module_name = self.current.last().unwrap();
|
||||
short_title = if it.is_crate() {
|
||||
format!("Crate {module_name}")
|
||||
} else {
|
||||
format!("Module {module_name}")
|
||||
};
|
||||
&short_title[..]
|
||||
} else {
|
||||
it.name.as_ref().unwrap().as_str()
|
||||
};
|
||||
if !it.is_primitive() && !it.is_keyword() {
|
||||
if !is_module {
|
||||
title.push_str(" in ");
|
||||
@@ -240,6 +252,7 @@ fn render_item(&mut self, it: &clean::Item, is_module: bool) -> String {
|
||||
root_path: &self.root_path(),
|
||||
static_root_path: self.shared.static_root_path.as_deref(),
|
||||
title: &title,
|
||||
short_title,
|
||||
description: &desc,
|
||||
resource_suffix: &self.shared.resource_suffix,
|
||||
rust_logo: has_doc_flag(self.tcx(), LOCAL_CRATE.as_def_id(), sym::rust_logo),
|
||||
@@ -617,6 +630,7 @@ fn after_krate(mut self) -> Result<(), Error> {
|
||||
let shared = &self.shared;
|
||||
let mut page = layout::Page {
|
||||
title: "List of all items in this crate",
|
||||
short_title: "All",
|
||||
css_class: "mod sys",
|
||||
root_path: "../",
|
||||
static_root_path: shared.static_root_path.as_deref(),
|
||||
|
||||
@@ -130,11 +130,11 @@ pub(crate) struct IndexItem {
|
||||
pub(crate) ty: ItemType,
|
||||
pub(crate) defid: Option<DefId>,
|
||||
pub(crate) name: Symbol,
|
||||
pub(crate) path: String,
|
||||
pub(crate) module_path: Vec<Symbol>,
|
||||
pub(crate) desc: String,
|
||||
pub(crate) parent: Option<DefId>,
|
||||
pub(crate) parent_idx: Option<isize>,
|
||||
pub(crate) exact_path: Option<String>,
|
||||
pub(crate) parent_idx: Option<usize>,
|
||||
pub(crate) exact_module_path: Option<Vec<Symbol>>,
|
||||
pub(crate) impl_id: Option<DefId>,
|
||||
pub(crate) search_type: Option<IndexItemFunctionType>,
|
||||
pub(crate) aliases: Box<[Symbol]>,
|
||||
@@ -150,6 +150,19 @@ struct RenderType {
|
||||
}
|
||||
|
||||
impl RenderType {
|
||||
fn size(&self) -> usize {
|
||||
let mut size = 1;
|
||||
if let Some(generics) = &self.generics {
|
||||
size += generics.iter().map(RenderType::size).sum::<usize>();
|
||||
}
|
||||
if let Some(bindings) = &self.bindings {
|
||||
for (_, constraints) in bindings.iter() {
|
||||
size += 1;
|
||||
size += constraints.iter().map(RenderType::size).sum::<usize>();
|
||||
}
|
||||
}
|
||||
size
|
||||
}
|
||||
// Types are rendered as lists of lists, because that's pretty compact.
|
||||
// The contents of the lists are always integers in self-terminating hex
|
||||
// form, handled by `RenderTypeId::write_to_string`, so no commas are
|
||||
@@ -191,6 +204,62 @@ fn write_optional_id(id: Option<RenderTypeId>, string: &mut String) {
|
||||
write_optional_id(self.id, string);
|
||||
}
|
||||
}
|
||||
fn read_from_bytes(string: &[u8]) -> (RenderType, usize) {
|
||||
let mut i = 0;
|
||||
if string[i] == b'{' {
|
||||
i += 1;
|
||||
let (id, offset) = RenderTypeId::read_from_bytes(&string[i..]);
|
||||
i += offset;
|
||||
let generics = if string[i] == b'{' {
|
||||
i += 1;
|
||||
let mut generics = Vec::new();
|
||||
while string[i] != b'}' {
|
||||
let (ty, offset) = RenderType::read_from_bytes(&string[i..]);
|
||||
i += offset;
|
||||
generics.push(ty);
|
||||
}
|
||||
assert!(string[i] == b'}');
|
||||
i += 1;
|
||||
Some(generics)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let bindings = if string[i] == b'{' {
|
||||
i += 1;
|
||||
let mut bindings = Vec::new();
|
||||
while string[i] == b'{' {
|
||||
i += 1;
|
||||
let (binding, boffset) = RenderTypeId::read_from_bytes(&string[i..]);
|
||||
i += boffset;
|
||||
let mut bconstraints = Vec::new();
|
||||
assert!(string[i] == b'{');
|
||||
i += 1;
|
||||
while string[i] != b'}' {
|
||||
let (constraint, coffset) = RenderType::read_from_bytes(&string[i..]);
|
||||
i += coffset;
|
||||
bconstraints.push(constraint);
|
||||
}
|
||||
assert!(string[i] == b'}');
|
||||
i += 1;
|
||||
bindings.push((binding.unwrap(), bconstraints));
|
||||
assert!(string[i] == b'}');
|
||||
i += 1;
|
||||
}
|
||||
assert!(string[i] == b'}');
|
||||
i += 1;
|
||||
Some(bindings)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
assert!(string[i] == b'}');
|
||||
i += 1;
|
||||
(RenderType { id, generics, bindings }, i)
|
||||
} else {
|
||||
let (id, offset) = RenderTypeId::read_from_bytes(string);
|
||||
i += offset;
|
||||
(RenderType { id, generics: None, bindings: None }, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||
@@ -212,7 +281,20 @@ fn write_to_string(&self, string: &mut String) {
|
||||
RenderTypeId::Index(idx) => (*idx).try_into().unwrap(),
|
||||
_ => panic!("must convert render types to indexes before serializing"),
|
||||
};
|
||||
search_index::encode::write_vlqhex_to_string(id, string);
|
||||
search_index::encode::write_signed_vlqhex_to_string(id, string);
|
||||
}
|
||||
fn read_from_bytes(string: &[u8]) -> (Option<RenderTypeId>, usize) {
|
||||
let Some((value, offset)) = search_index::encode::read_signed_vlqhex_from_string(string)
|
||||
else {
|
||||
return (None, 0);
|
||||
};
|
||||
let value = isize::try_from(value).unwrap();
|
||||
let ty = match value {
|
||||
..0 => Some(RenderTypeId::Index(value)),
|
||||
0 => None,
|
||||
1.. => Some(RenderTypeId::Index(value - 1)),
|
||||
};
|
||||
(ty, offset)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -226,12 +308,64 @@ pub(crate) struct IndexItemFunctionType {
|
||||
}
|
||||
|
||||
impl IndexItemFunctionType {
|
||||
fn write_to_string<'a>(
|
||||
&'a self,
|
||||
string: &mut String,
|
||||
backref_queue: &mut VecDeque<&'a IndexItemFunctionType>,
|
||||
) {
|
||||
assert!(backref_queue.len() <= 16);
|
||||
fn size(&self) -> usize {
|
||||
self.inputs.iter().map(RenderType::size).sum::<usize>()
|
||||
+ self.output.iter().map(RenderType::size).sum::<usize>()
|
||||
+ self
|
||||
.where_clause
|
||||
.iter()
|
||||
.map(|constraints| constraints.iter().map(RenderType::size).sum::<usize>())
|
||||
.sum::<usize>()
|
||||
}
|
||||
fn read_from_string_without_param_names(string: &[u8]) -> (IndexItemFunctionType, usize) {
|
||||
let mut i = 0;
|
||||
if string[i] == b'`' {
|
||||
return (
|
||||
IndexItemFunctionType {
|
||||
inputs: Vec::new(),
|
||||
output: Vec::new(),
|
||||
where_clause: Vec::new(),
|
||||
param_names: Vec::new(),
|
||||
},
|
||||
1,
|
||||
);
|
||||
}
|
||||
assert_eq!(b'{', string[i]);
|
||||
i += 1;
|
||||
fn read_args_from_string(string: &[u8]) -> (Vec<RenderType>, usize) {
|
||||
let mut i = 0;
|
||||
let mut params = Vec::new();
|
||||
if string[i] == b'{' {
|
||||
// multiple params
|
||||
i += 1;
|
||||
while string[i] != b'}' {
|
||||
let (ty, offset) = RenderType::read_from_bytes(&string[i..]);
|
||||
i += offset;
|
||||
params.push(ty);
|
||||
}
|
||||
i += 1;
|
||||
} else if string[i] != b'}' {
|
||||
let (tyid, offset) = RenderTypeId::read_from_bytes(&string[i..]);
|
||||
params.push(RenderType { id: tyid, generics: None, bindings: None });
|
||||
i += offset;
|
||||
}
|
||||
(params, i)
|
||||
}
|
||||
let (inputs, offset) = read_args_from_string(&string[i..]);
|
||||
i += offset;
|
||||
let (output, offset) = read_args_from_string(&string[i..]);
|
||||
i += offset;
|
||||
let mut where_clause = Vec::new();
|
||||
while string[i] != b'}' {
|
||||
let (constraint, offset) = read_args_from_string(&string[i..]);
|
||||
i += offset;
|
||||
where_clause.push(constraint);
|
||||
}
|
||||
assert_eq!(b'}', string[i], "{} {}", String::from_utf8_lossy(&string), i);
|
||||
i += 1;
|
||||
(IndexItemFunctionType { inputs, output, where_clause, param_names: Vec::new() }, i)
|
||||
}
|
||||
fn write_to_string_without_param_names<'a>(&'a self, string: &mut String) {
|
||||
// If we couldn't figure out a type, just write 0,
|
||||
// which is encoded as `` ` `` (see RenderTypeId::write_to_string).
|
||||
let has_missing = self
|
||||
@@ -241,18 +375,7 @@ fn write_to_string<'a>(
|
||||
.any(|i| i.id.is_none() && i.generics.is_none());
|
||||
if has_missing {
|
||||
string.push('`');
|
||||
} else if let Some(idx) = backref_queue.iter().position(|other| *other == self) {
|
||||
// The backref queue has 16 items, so backrefs use
|
||||
// a single hexit, disjoint from the ones used for numbers.
|
||||
string.push(
|
||||
char::try_from('0' as u32 + u32::try_from(idx).unwrap())
|
||||
.expect("last possible value is '?'"),
|
||||
);
|
||||
} else {
|
||||
backref_queue.push_front(self);
|
||||
if backref_queue.len() > 16 {
|
||||
backref_queue.pop_back();
|
||||
}
|
||||
string.push('{');
|
||||
match &self.inputs[..] {
|
||||
[one] if one.generics.is_none() && one.bindings.is_none() => {
|
||||
|
||||
@@ -35,6 +35,7 @@
|
||||
visibility_print_with_space,
|
||||
};
|
||||
use crate::html::markdown::{HeadingOffset, MarkdownSummaryLine};
|
||||
use crate::html::render::sidebar::filters;
|
||||
use crate::html::render::{document_full, document_item_info};
|
||||
use crate::html::url_parts_builder::UrlPartsBuilder;
|
||||
|
||||
|
||||
@@ -1,72 +1,1169 @@
|
||||
pub(crate) mod encode;
|
||||
|
||||
use std::collections::BTreeSet;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::{BTreeMap, VecDeque};
|
||||
use std::path::Path;
|
||||
|
||||
use encode::{bitmap_to_string, write_vlqhex_to_string};
|
||||
use rustc_ast::join_path_syms;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_span::def_id::DefId;
|
||||
use rustc_span::sym;
|
||||
use rustc_span::symbol::{Symbol, kw};
|
||||
use serde::ser::{Serialize, SerializeSeq, SerializeStruct, Serializer};
|
||||
use serde::de::{self, Deserializer, Error as _};
|
||||
use serde::ser::{SerializeSeq, Serializer};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use stringdex::internals as stringdex_internals;
|
||||
use thin_vec::ThinVec;
|
||||
use tracing::instrument;
|
||||
|
||||
use crate::clean::types::{Function, Generics, ItemId, Type, WherePredicate};
|
||||
use crate::clean::{self, utils};
|
||||
use crate::error::Error;
|
||||
use crate::formats::cache::{Cache, OrphanImplItem};
|
||||
use crate::formats::item_type::ItemType;
|
||||
use crate::html::markdown::short_markdown_summary;
|
||||
use crate::html::render::ordered_json::OrderedJson;
|
||||
use crate::html::render::{self, IndexItem, IndexItemFunctionType, RenderType, RenderTypeId};
|
||||
|
||||
/// The serialized search description sharded version
|
||||
///
|
||||
/// The `index` is a JSON-encoded list of names and other information.
|
||||
///
|
||||
/// The desc has newlined descriptions, split up by size into 128KiB shards.
|
||||
/// For example, `(4, "foo\nbar\nbaz\nquux")`.
|
||||
///
|
||||
/// There is no single, optimal size for these shards, because it depends on
|
||||
/// configuration values that we can't predict or control, such as the version
|
||||
/// of HTTP used (HTTP/1.1 would work better with larger files, while HTTP/2
|
||||
/// and 3 are more agnostic), transport compression (gzip, zstd, etc), whether
|
||||
/// the search query is going to produce a large number of results or a small
|
||||
/// number, the bandwidth delay product of the network...
|
||||
///
|
||||
/// Gzipping some standard library descriptions to guess what transport
|
||||
/// compression will do, the compressed file sizes can be as small as 4.9KiB
|
||||
/// or as large as 18KiB (ignoring the final 1.9KiB shard of leftovers).
|
||||
/// A "reasonable" range for files is for them to be bigger than 1KiB,
|
||||
/// since that's about the amount of data that can be transferred in a
|
||||
/// single TCP packet, and 64KiB, the maximum amount of data that
|
||||
/// TCP can transfer in a single round trip without extensions.
|
||||
///
|
||||
/// [1]: https://en.wikipedia.org/wiki/Maximum_transmission_unit#MTUs_for_common_media
|
||||
/// [2]: https://en.wikipedia.org/wiki/Sliding_window_protocol#Basic_concept
|
||||
/// [3]: https://learn.microsoft.com/en-us/troubleshoot/windows-server/networking/description-tcp-features
|
||||
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
|
||||
pub(crate) struct SerializedSearchIndex {
|
||||
pub(crate) index: OrderedJson,
|
||||
pub(crate) desc: Vec<(usize, String)>,
|
||||
// data from disk
|
||||
names: Vec<String>,
|
||||
path_data: Vec<Option<PathData>>,
|
||||
entry_data: Vec<Option<EntryData>>,
|
||||
descs: Vec<String>,
|
||||
function_data: Vec<Option<FunctionData>>,
|
||||
alias_pointers: Vec<Option<usize>>,
|
||||
// inverted index for concrete types and generics
|
||||
type_data: Vec<Option<TypeData>>,
|
||||
/// inverted index of generics
|
||||
///
|
||||
/// - The outermost list has one entry per alpha-normalized generic.
|
||||
///
|
||||
/// - The second layer is sorted by number of types that appear in the
|
||||
/// type signature. The search engine iterates over these in order from
|
||||
/// smallest to largest. Functions with less stuff in their type
|
||||
/// signature are more likely to be what the user wants, because we never
|
||||
/// show functions that are *missing* parts of the query, so removing..
|
||||
///
|
||||
/// - The final layer is the list of functions.
|
||||
generic_inverted_index: Vec<Vec<Vec<u32>>>,
|
||||
// generated in-memory backref cache
|
||||
#[serde(skip)]
|
||||
crate_paths_index: FxHashMap<(ItemType, Vec<Symbol>), usize>,
|
||||
}
|
||||
|
||||
const DESC_INDEX_SHARD_LEN: usize = 128 * 1024;
|
||||
impl SerializedSearchIndex {
|
||||
fn load(doc_root: &Path, resource_suffix: &str) -> Result<SerializedSearchIndex, Error> {
|
||||
let mut names: Vec<String> = Vec::new();
|
||||
let mut path_data: Vec<Option<PathData>> = Vec::new();
|
||||
let mut entry_data: Vec<Option<EntryData>> = Vec::new();
|
||||
let mut descs: Vec<String> = Vec::new();
|
||||
let mut function_data: Vec<Option<FunctionData>> = Vec::new();
|
||||
let mut type_data: Vec<Option<TypeData>> = Vec::new();
|
||||
let mut alias_pointers: Vec<Option<usize>> = Vec::new();
|
||||
|
||||
let mut generic_inverted_index: Vec<Vec<Vec<u32>>> = Vec::new();
|
||||
|
||||
match perform_read_strings(resource_suffix, doc_root, "name", &mut names) {
|
||||
Ok(()) => {
|
||||
perform_read_serde(resource_suffix, doc_root, "path", &mut path_data)?;
|
||||
perform_read_serde(resource_suffix, doc_root, "entry", &mut entry_data)?;
|
||||
perform_read_strings(resource_suffix, doc_root, "desc", &mut descs)?;
|
||||
perform_read_serde(resource_suffix, doc_root, "function", &mut function_data)?;
|
||||
perform_read_serde(resource_suffix, doc_root, "type", &mut type_data)?;
|
||||
perform_read_serde(resource_suffix, doc_root, "alias", &mut alias_pointers)?;
|
||||
perform_read_postings(
|
||||
resource_suffix,
|
||||
doc_root,
|
||||
"generic_inverted_index",
|
||||
&mut generic_inverted_index,
|
||||
)?;
|
||||
}
|
||||
Err(_) => {
|
||||
names.clear();
|
||||
}
|
||||
}
|
||||
fn perform_read_strings(
|
||||
resource_suffix: &str,
|
||||
doc_root: &Path,
|
||||
column_name: &str,
|
||||
column: &mut Vec<String>,
|
||||
) -> Result<(), Error> {
|
||||
let root_path = doc_root.join(format!("search.index/root{resource_suffix}.js"));
|
||||
let column_path = doc_root.join(format!("search.index/{column_name}/"));
|
||||
stringdex_internals::read_data_from_disk_column(
|
||||
root_path,
|
||||
column_name.as_bytes(),
|
||||
column_path.clone(),
|
||||
&mut |_id, item| {
|
||||
column.push(String::from_utf8(item.to_vec())?);
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
.map_err(
|
||||
|error: stringdex_internals::ReadDataError<Box<dyn std::error::Error>>| Error {
|
||||
file: column_path,
|
||||
error: format!("failed to read column from disk: {error}"),
|
||||
},
|
||||
)
|
||||
}
|
||||
fn perform_read_serde(
|
||||
resource_suffix: &str,
|
||||
doc_root: &Path,
|
||||
column_name: &str,
|
||||
column: &mut Vec<Option<impl for<'de> Deserialize<'de> + 'static>>,
|
||||
) -> Result<(), Error> {
|
||||
let root_path = doc_root.join(format!("search.index/root{resource_suffix}.js"));
|
||||
let column_path = doc_root.join(format!("search.index/{column_name}/"));
|
||||
stringdex_internals::read_data_from_disk_column(
|
||||
root_path,
|
||||
column_name.as_bytes(),
|
||||
column_path.clone(),
|
||||
&mut |_id, item| {
|
||||
if item.is_empty() {
|
||||
column.push(None);
|
||||
} else {
|
||||
column.push(Some(serde_json::from_slice(item)?));
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
.map_err(
|
||||
|error: stringdex_internals::ReadDataError<Box<dyn std::error::Error>>| Error {
|
||||
file: column_path,
|
||||
error: format!("failed to read column from disk: {error}"),
|
||||
},
|
||||
)
|
||||
}
|
||||
fn perform_read_postings(
|
||||
resource_suffix: &str,
|
||||
doc_root: &Path,
|
||||
column_name: &str,
|
||||
column: &mut Vec<Vec<Vec<u32>>>,
|
||||
) -> Result<(), Error> {
|
||||
let root_path = doc_root.join(format!("search.index/root{resource_suffix}.js"));
|
||||
let column_path = doc_root.join(format!("search.index/{column_name}/"));
|
||||
stringdex_internals::read_data_from_disk_column(
|
||||
root_path,
|
||||
column_name.as_bytes(),
|
||||
column_path.clone(),
|
||||
&mut |_id, buf| {
|
||||
let mut postings = Vec::new();
|
||||
encode::read_postings_from_string(&mut postings, buf);
|
||||
column.push(postings);
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
.map_err(
|
||||
|error: stringdex_internals::ReadDataError<Box<dyn std::error::Error>>| Error {
|
||||
file: column_path,
|
||||
error: format!("failed to read column from disk: {error}"),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
assert_eq!(names.len(), path_data.len());
|
||||
assert_eq!(path_data.len(), entry_data.len());
|
||||
assert_eq!(entry_data.len(), descs.len());
|
||||
assert_eq!(descs.len(), function_data.len());
|
||||
assert_eq!(function_data.len(), type_data.len());
|
||||
assert_eq!(type_data.len(), alias_pointers.len());
|
||||
|
||||
// generic_inverted_index is not the same length as other columns,
|
||||
// because it's actually a completely different set of objects
|
||||
|
||||
let mut crate_paths_index: FxHashMap<(ItemType, Vec<Symbol>), usize> = FxHashMap::default();
|
||||
for (i, (name, path_data)) in names.iter().zip(path_data.iter()).enumerate() {
|
||||
if let Some(path_data) = path_data {
|
||||
let full_path = if path_data.module_path.is_empty() {
|
||||
vec![Symbol::intern(name)]
|
||||
} else {
|
||||
let mut full_path = path_data.module_path.to_vec();
|
||||
full_path.push(Symbol::intern(name));
|
||||
full_path
|
||||
};
|
||||
crate_paths_index.insert((path_data.ty, full_path), i);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(SerializedSearchIndex {
|
||||
names,
|
||||
path_data,
|
||||
entry_data,
|
||||
descs,
|
||||
function_data,
|
||||
type_data,
|
||||
alias_pointers,
|
||||
generic_inverted_index,
|
||||
crate_paths_index,
|
||||
})
|
||||
}
|
||||
fn push(
|
||||
&mut self,
|
||||
name: String,
|
||||
path_data: Option<PathData>,
|
||||
entry_data: Option<EntryData>,
|
||||
desc: String,
|
||||
function_data: Option<FunctionData>,
|
||||
type_data: Option<TypeData>,
|
||||
alias_pointer: Option<usize>,
|
||||
) -> usize {
|
||||
let index = self.names.len();
|
||||
assert_eq!(self.names.len(), self.path_data.len());
|
||||
if let Some(path_data) = &path_data
|
||||
&& let name = Symbol::intern(&name)
|
||||
&& let fqp = if path_data.module_path.is_empty() {
|
||||
vec![name]
|
||||
} else {
|
||||
let mut v = path_data.module_path.clone();
|
||||
v.push(name);
|
||||
v
|
||||
}
|
||||
&& let Some(&other_path) = self.crate_paths_index.get(&(path_data.ty, fqp))
|
||||
&& self.path_data.get(other_path).map_or(false, Option::is_some)
|
||||
{
|
||||
self.path_data.push(None);
|
||||
} else {
|
||||
self.path_data.push(path_data);
|
||||
}
|
||||
self.names.push(name);
|
||||
assert_eq!(self.entry_data.len(), self.descs.len());
|
||||
self.entry_data.push(entry_data);
|
||||
assert_eq!(self.descs.len(), self.function_data.len());
|
||||
self.descs.push(desc);
|
||||
assert_eq!(self.function_data.len(), self.type_data.len());
|
||||
self.function_data.push(function_data);
|
||||
assert_eq!(self.type_data.len(), self.alias_pointers.len());
|
||||
self.type_data.push(type_data);
|
||||
self.alias_pointers.push(alias_pointer);
|
||||
index
|
||||
}
|
||||
fn push_path(&mut self, name: String, path_data: PathData) -> usize {
|
||||
self.push(name, Some(path_data), None, String::new(), None, None, None)
|
||||
}
|
||||
fn push_type(&mut self, name: String, path_data: PathData, type_data: TypeData) -> usize {
|
||||
self.push(name, Some(path_data), None, String::new(), None, Some(type_data), None)
|
||||
}
|
||||
fn push_alias(&mut self, name: String, alias_pointer: usize) -> usize {
|
||||
self.push(name, None, None, String::new(), None, None, Some(alias_pointer))
|
||||
}
|
||||
|
||||
fn get_id_by_module_path(&mut self, path: &[Symbol]) -> usize {
|
||||
let ty = if path.len() == 1 { ItemType::ExternCrate } else { ItemType::Module };
|
||||
match self.crate_paths_index.entry((ty, path.to_vec())) {
|
||||
Entry::Occupied(index) => *index.get(),
|
||||
Entry::Vacant(slot) => {
|
||||
slot.insert(self.path_data.len());
|
||||
let (name, module_path) = path.split_last().unwrap();
|
||||
self.push_path(
|
||||
name.as_str().to_string(),
|
||||
PathData { ty, module_path: module_path.to_vec(), exact_module_path: None },
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn union(mut self, other: &SerializedSearchIndex) -> SerializedSearchIndex {
|
||||
let other_entryid_offset = self.names.len();
|
||||
let mut map_other_pathid_to_self_pathid: Vec<usize> = Vec::new();
|
||||
let mut skips = FxHashSet::default();
|
||||
for (other_pathid, other_path_data) in other.path_data.iter().enumerate() {
|
||||
if let Some(other_path_data) = other_path_data {
|
||||
let mut fqp = other_path_data.module_path.clone();
|
||||
let name = Symbol::intern(&other.names[other_pathid]);
|
||||
fqp.push(name);
|
||||
let self_pathid = other_entryid_offset + other_pathid;
|
||||
let self_pathid = match self.crate_paths_index.entry((other_path_data.ty, fqp)) {
|
||||
Entry::Vacant(slot) => {
|
||||
slot.insert(self_pathid);
|
||||
self_pathid
|
||||
}
|
||||
Entry::Occupied(existing_entryid) => {
|
||||
skips.insert(other_pathid);
|
||||
let self_pathid = *existing_entryid.get();
|
||||
let new_type_data = match (
|
||||
self.type_data[self_pathid].take(),
|
||||
other.type_data[other_pathid].as_ref(),
|
||||
) {
|
||||
(Some(self_type_data), None) => Some(self_type_data),
|
||||
(None, Some(other_type_data)) => Some(TypeData {
|
||||
search_unbox: other_type_data.search_unbox,
|
||||
inverted_function_signature_index: other_type_data
|
||||
.inverted_function_signature_index
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|mut list: Vec<u32>| {
|
||||
for fnid in &mut list {
|
||||
assert!(
|
||||
other.function_data
|
||||
[usize::try_from(*fnid).unwrap()]
|
||||
.is_some(),
|
||||
);
|
||||
// this is valid because we call `self.push()` once, exactly, for every entry,
|
||||
// even if we're just pushing a tombstone
|
||||
*fnid += u32::try_from(other_entryid_offset).unwrap();
|
||||
}
|
||||
list
|
||||
})
|
||||
.collect(),
|
||||
}),
|
||||
(Some(mut self_type_data), Some(other_type_data)) => {
|
||||
for (size, other_list) in other_type_data
|
||||
.inverted_function_signature_index
|
||||
.iter()
|
||||
.enumerate()
|
||||
{
|
||||
while self_type_data.inverted_function_signature_index.len()
|
||||
<= size
|
||||
{
|
||||
self_type_data
|
||||
.inverted_function_signature_index
|
||||
.push(Vec::new());
|
||||
}
|
||||
self_type_data.inverted_function_signature_index[size].extend(
|
||||
other_list.iter().copied().map(|fnid| {
|
||||
assert!(
|
||||
other.function_data[usize::try_from(fnid).unwrap()]
|
||||
.is_some(),
|
||||
);
|
||||
// this is valid because we call `self.push()` once, exactly, for every entry,
|
||||
// even if we're just pushing a tombstone
|
||||
fnid + u32::try_from(other_entryid_offset).unwrap()
|
||||
}),
|
||||
)
|
||||
}
|
||||
Some(self_type_data)
|
||||
}
|
||||
(None, None) => None,
|
||||
};
|
||||
self.type_data[self_pathid] = new_type_data;
|
||||
self_pathid
|
||||
}
|
||||
};
|
||||
map_other_pathid_to_self_pathid.push(self_pathid);
|
||||
} else {
|
||||
// if this gets used, we want it to crash
|
||||
// this should be impossible as a valid index, since some of the
|
||||
// memory must be used for stuff other than the list
|
||||
map_other_pathid_to_self_pathid.push(!0);
|
||||
}
|
||||
}
|
||||
for other_entryid in 0..other.names.len() {
|
||||
if skips.contains(&other_entryid) {
|
||||
// we push tombstone entries to keep the IDs lined up
|
||||
self.push(String::new(), None, None, String::new(), None, None, None);
|
||||
} else {
|
||||
self.push(
|
||||
other.names[other_entryid].clone(),
|
||||
other.path_data[other_entryid].clone(),
|
||||
other.entry_data[other_entryid].as_ref().map(|other_entry_data| EntryData {
|
||||
parent: other_entry_data
|
||||
.parent
|
||||
.map(|parent| map_other_pathid_to_self_pathid[parent])
|
||||
.clone(),
|
||||
module_path: other_entry_data
|
||||
.module_path
|
||||
.map(|path| map_other_pathid_to_self_pathid[path])
|
||||
.clone(),
|
||||
exact_module_path: other_entry_data
|
||||
.exact_module_path
|
||||
.map(|exact_path| map_other_pathid_to_self_pathid[exact_path])
|
||||
.clone(),
|
||||
krate: map_other_pathid_to_self_pathid[other_entry_data.krate],
|
||||
..other_entry_data.clone()
|
||||
}),
|
||||
other.descs[other_entryid].clone(),
|
||||
other.function_data[other_entryid].as_ref().map(|function_data| FunctionData {
|
||||
function_signature: {
|
||||
let (mut func, _offset) =
|
||||
IndexItemFunctionType::read_from_string_without_param_names(
|
||||
function_data.function_signature.as_bytes(),
|
||||
);
|
||||
fn map_fn_sig_item(
|
||||
map_other_pathid_to_self_pathid: &mut Vec<usize>,
|
||||
ty: &mut RenderType,
|
||||
) {
|
||||
match ty.id {
|
||||
None => {}
|
||||
Some(RenderTypeId::Index(generic)) if generic < 0 => {}
|
||||
Some(RenderTypeId::Index(id)) => {
|
||||
let id = usize::try_from(id).unwrap();
|
||||
let id = map_other_pathid_to_self_pathid[id];
|
||||
assert!(id != !0);
|
||||
ty.id =
|
||||
Some(RenderTypeId::Index(isize::try_from(id).unwrap()));
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
if let Some(generics) = &mut ty.generics {
|
||||
for generic in generics {
|
||||
map_fn_sig_item(map_other_pathid_to_self_pathid, generic);
|
||||
}
|
||||
}
|
||||
if let Some(bindings) = &mut ty.bindings {
|
||||
for (param, constraints) in bindings {
|
||||
*param = match *param {
|
||||
param @ RenderTypeId::Index(generic) if generic < 0 => {
|
||||
param
|
||||
}
|
||||
RenderTypeId::Index(id) => {
|
||||
let id = usize::try_from(id).unwrap();
|
||||
let id = map_other_pathid_to_self_pathid[id];
|
||||
assert!(id != !0);
|
||||
RenderTypeId::Index(isize::try_from(id).unwrap())
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
for constraint in constraints {
|
||||
map_fn_sig_item(
|
||||
map_other_pathid_to_self_pathid,
|
||||
constraint,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for input in &mut func.inputs {
|
||||
map_fn_sig_item(&mut map_other_pathid_to_self_pathid, input);
|
||||
}
|
||||
for output in &mut func.output {
|
||||
map_fn_sig_item(&mut map_other_pathid_to_self_pathid, output);
|
||||
}
|
||||
for clause in &mut func.where_clause {
|
||||
for entry in clause {
|
||||
map_fn_sig_item(&mut map_other_pathid_to_self_pathid, entry);
|
||||
}
|
||||
}
|
||||
let mut result =
|
||||
String::with_capacity(function_data.function_signature.len());
|
||||
func.write_to_string_without_param_names(&mut result);
|
||||
result
|
||||
},
|
||||
param_names: function_data.param_names.clone(),
|
||||
}),
|
||||
other.type_data[other_entryid].as_ref().map(|type_data| TypeData {
|
||||
inverted_function_signature_index: type_data
|
||||
.inverted_function_signature_index
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|mut list| {
|
||||
for fnid in &mut list {
|
||||
assert!(
|
||||
other.function_data[usize::try_from(*fnid).unwrap()]
|
||||
.is_some(),
|
||||
);
|
||||
// this is valid because we call `self.push()` once, exactly, for every entry,
|
||||
// even if we're just pushing a tombstone
|
||||
*fnid += u32::try_from(other_entryid_offset).unwrap();
|
||||
}
|
||||
list
|
||||
})
|
||||
.collect(),
|
||||
search_unbox: type_data.search_unbox,
|
||||
}),
|
||||
other.alias_pointers[other_entryid]
|
||||
.map(|alias_pointer| alias_pointer + other_entryid_offset),
|
||||
);
|
||||
}
|
||||
}
|
||||
for (i, other_generic_inverted_index) in other.generic_inverted_index.iter().enumerate() {
|
||||
for (size, other_list) in other_generic_inverted_index.iter().enumerate() {
|
||||
let self_generic_inverted_index = match self.generic_inverted_index.get_mut(i) {
|
||||
Some(self_generic_inverted_index) => self_generic_inverted_index,
|
||||
None => {
|
||||
self.generic_inverted_index.push(Vec::new());
|
||||
self.generic_inverted_index.last_mut().unwrap()
|
||||
}
|
||||
};
|
||||
while self_generic_inverted_index.len() <= size {
|
||||
self_generic_inverted_index.push(Vec::new());
|
||||
}
|
||||
self_generic_inverted_index[size].extend(
|
||||
other_list
|
||||
.iter()
|
||||
.copied()
|
||||
.map(|fnid| fnid + u32::try_from(other_entryid_offset).unwrap()),
|
||||
);
|
||||
}
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
/// Re-orders the index rows and strips tombstones.
///
/// Produces a new `SerializedSearchIndex` where entries are sorted
/// (tombstones last, then shortest names first), and every cross-row ID —
/// `krate`/`parent`/module-path references, type IDs embedded in function
/// signatures, and inverted-index posting lists — is rewritten through an
/// old-ID → new-ID map so the rows still point at each other correctly.
pub(crate) fn sort(self) -> SerializedSearchIndex {
    let mut idlist: Vec<usize> = (0..self.names.len()).collect();
    // nameless entries are tombstones, and will be removed after sorting
    // sort shorter names first, so that we can present them in order out of search.js
    idlist.sort_by_key(|&id| {
        (
            self.names[id].is_empty(),
            self.names[id].len(),
            &self.names[id],
            self.entry_data[id].as_ref().map_or("", |entry| self.names[entry.krate].as_str()),
            self.path_data[id].as_ref().map_or(&[][..], |entry| &entry.module_path[..]),
        )
    });
    // old row index -> new row index, used to rewrite every cross-reference below
    let map = FxHashMap::from_iter(
        idlist.iter().enumerate().map(|(new_id, &old_id)| (old_id, new_id)),
    );
    let mut new = SerializedSearchIndex::default();
    for &id in &idlist {
        if self.names[id].is_empty() {
            // tombstones sort last (empty-name flag is the primary sort key),
            // so everything from here on can be dropped
            break;
        }
        new.push(
            self.names[id].clone(),
            self.path_data[id].clone(),
            // rewrite every row reference inside the entry through `map`
            self.entry_data[id].as_ref().map(
                |EntryData {
                     krate,
                     ty,
                     module_path,
                     exact_module_path,
                     parent,
                     deprecated,
                     associated_item_disambiguator,
                 }| EntryData {
                    krate: *map.get(krate).unwrap(),
                    ty: *ty,
                    module_path: module_path.and_then(|path_id| map.get(&path_id).copied()),
                    exact_module_path: exact_module_path
                        .and_then(|path_id| map.get(&path_id).copied()),
                    parent: parent.and_then(|path_id| map.get(&path_id).copied()),
                    deprecated: *deprecated,
                    associated_item_disambiguator: associated_item_disambiguator.clone(),
                },
            ),
            self.descs[id].clone(),
            // function signatures embed row IDs; decode, remap, re-encode
            self.function_data[id].as_ref().map(
                |FunctionData { function_signature, param_names }| FunctionData {
                    function_signature: {
                        let (mut func, _offset) =
                            IndexItemFunctionType::read_from_string_without_param_names(
                                function_signature.as_bytes(),
                            );
                        // recursively remap non-generic type IDs in a render type
                        // (negative indices are generics and stay as-is)
                        fn map_fn_sig_item(map: &FxHashMap<usize, usize>, ty: &mut RenderType) {
                            match ty.id {
                                None => {}
                                Some(RenderTypeId::Index(generic)) if generic < 0 => {}
                                Some(RenderTypeId::Index(id)) => {
                                    let id = usize::try_from(id).unwrap();
                                    let id = *map.get(&id).unwrap();
                                    assert!(id != !0);
                                    ty.id =
                                        Some(RenderTypeId::Index(isize::try_from(id).unwrap()));
                                }
                                _ => unreachable!(),
                            }
                            if let Some(generics) = &mut ty.generics {
                                for generic in generics {
                                    map_fn_sig_item(map, generic);
                                }
                            }
                            if let Some(bindings) = &mut ty.bindings {
                                for (param, constraints) in bindings {
                                    *param = match *param {
                                        param @ RenderTypeId::Index(generic) if generic < 0 => {
                                            param
                                        }
                                        RenderTypeId::Index(id) => {
                                            let id = usize::try_from(id).unwrap();
                                            let id = *map.get(&id).unwrap();
                                            assert!(id != !0);
                                            RenderTypeId::Index(isize::try_from(id).unwrap())
                                        }
                                        _ => unreachable!(),
                                    };
                                    for constraint in constraints {
                                        map_fn_sig_item(map, constraint);
                                    }
                                }
                            }
                        }
                        for input in &mut func.inputs {
                            map_fn_sig_item(&map, input);
                        }
                        for output in &mut func.output {
                            map_fn_sig_item(&map, output);
                        }
                        for clause in &mut func.where_clause {
                            for entry in clause {
                                map_fn_sig_item(&map, entry);
                            }
                        }
                        let mut result =
                            String::with_capacity(function_signature.len());
                        func.write_to_string_without_param_names(&mut result);
                        result
                    },
                    param_names: param_names.clone(),
                },
            ),
            // posting lists store function row IDs; remap and re-sort each list
            self.type_data[id].as_ref().map(
                |TypeData { search_unbox, inverted_function_signature_index }| {
                    let inverted_function_signature_index: Vec<Vec<u32>> =
                        inverted_function_signature_index
                            .iter()
                            .cloned()
                            .map(|mut list| {
                                for id in &mut list {
                                    *id = u32::try_from(
                                        *map.get(&usize::try_from(*id).unwrap()).unwrap(),
                                    )
                                    .unwrap();
                                }
                                list.sort();
                                list
                            })
                            .collect();
                    TypeData { search_unbox: *search_unbox, inverted_function_signature_index }
                },
            ),
            self.alias_pointers[id].and_then(|alias| map.get(&alias).copied()),
        );
    }
    // remap the global generic posting lists the same way; entries whose ID
    // no longer resolves are silently dropped by the filter_map
    new.generic_inverted_index = self
        .generic_inverted_index
        .into_iter()
        .map(|mut postings| {
            for list in postings.iter_mut() {
                let mut new_list: Vec<u32> = list
                    .iter()
                    .copied()
                    .filter_map(|id| u32::try_from(*map.get(&usize::try_from(id).ok()?)?).ok())
                    .collect();
                new_list.sort();
                *list = new_list;
            }
            postings
        })
        .collect();
    new
}
|
||||
|
||||
pub(crate) fn write_to(self, doc_root: &Path, resource_suffix: &str) -> Result<(), Error> {
|
||||
let SerializedSearchIndex {
|
||||
names,
|
||||
path_data,
|
||||
entry_data,
|
||||
descs,
|
||||
function_data,
|
||||
type_data,
|
||||
alias_pointers,
|
||||
generic_inverted_index,
|
||||
crate_paths_index: _,
|
||||
} = self;
|
||||
let mut serialized_root = Vec::new();
|
||||
serialized_root.extend_from_slice(br#"rr_('{"normalizedName":{"I":""#);
|
||||
let normalized_names = names
|
||||
.iter()
|
||||
.map(|name| {
|
||||
if name.contains("_") {
|
||||
name.replace("_", "").to_ascii_lowercase()
|
||||
} else {
|
||||
name.to_ascii_lowercase()
|
||||
}
|
||||
})
|
||||
.collect::<Vec<String>>();
|
||||
let names_search_tree = stringdex_internals::tree::encode_search_tree_ukkonen(
|
||||
normalized_names.iter().map(|name| name.as_bytes()),
|
||||
);
|
||||
let dir_path = doc_root.join(format!("search.index/"));
|
||||
let _ = std::fs::remove_dir_all(&dir_path); // if already missing, no problem
|
||||
stringdex_internals::write_tree_to_disk(
|
||||
&names_search_tree,
|
||||
&dir_path,
|
||||
&mut serialized_root,
|
||||
)
|
||||
.map_err(|error| Error {
|
||||
file: dir_path,
|
||||
error: format!("failed to write name tree to disk: {error}"),
|
||||
})?;
|
||||
std::mem::drop(names_search_tree);
|
||||
serialized_root.extend_from_slice(br#"","#);
|
||||
serialized_root.extend_from_slice(&perform_write_strings(
|
||||
doc_root,
|
||||
"normalizedName",
|
||||
normalized_names.into_iter(),
|
||||
)?);
|
||||
serialized_root.extend_from_slice(br#"},"crateNames":{"#);
|
||||
let mut crates: Vec<&[u8]> = entry_data
|
||||
.iter()
|
||||
.filter_map(|entry_data| Some(names[entry_data.as_ref()?.krate].as_bytes()))
|
||||
.collect();
|
||||
crates.sort();
|
||||
crates.dedup();
|
||||
serialized_root.extend_from_slice(&perform_write_strings(
|
||||
doc_root,
|
||||
"crateNames",
|
||||
crates.into_iter(),
|
||||
)?);
|
||||
serialized_root.extend_from_slice(br#"},"name":{"#);
|
||||
serialized_root.extend_from_slice(&perform_write_strings(doc_root, "name", names.iter())?);
|
||||
serialized_root.extend_from_slice(br#"},"path":{"#);
|
||||
serialized_root.extend_from_slice(&perform_write_serde(doc_root, "path", path_data)?);
|
||||
serialized_root.extend_from_slice(br#"},"entry":{"#);
|
||||
serialized_root.extend_from_slice(&perform_write_serde(doc_root, "entry", entry_data)?);
|
||||
serialized_root.extend_from_slice(br#"},"desc":{"#);
|
||||
serialized_root.extend_from_slice(&perform_write_strings(
|
||||
doc_root,
|
||||
"desc",
|
||||
descs.into_iter(),
|
||||
)?);
|
||||
serialized_root.extend_from_slice(br#"},"function":{"#);
|
||||
serialized_root.extend_from_slice(&perform_write_serde(
|
||||
doc_root,
|
||||
"function",
|
||||
function_data,
|
||||
)?);
|
||||
serialized_root.extend_from_slice(br#"},"type":{"#);
|
||||
serialized_root.extend_from_slice(&perform_write_serde(doc_root, "type", type_data)?);
|
||||
serialized_root.extend_from_slice(br#"},"alias":{"#);
|
||||
serialized_root.extend_from_slice(&perform_write_serde(doc_root, "alias", alias_pointers)?);
|
||||
serialized_root.extend_from_slice(br#"},"generic_inverted_index":{"#);
|
||||
serialized_root.extend_from_slice(&perform_write_postings(
|
||||
doc_root,
|
||||
"generic_inverted_index",
|
||||
generic_inverted_index,
|
||||
)?);
|
||||
serialized_root.extend_from_slice(br#"}}')"#);
|
||||
fn perform_write_strings(
|
||||
doc_root: &Path,
|
||||
dirname: &str,
|
||||
mut column: impl Iterator<Item = impl AsRef<[u8]> + Clone> + ExactSizeIterator,
|
||||
) -> Result<Vec<u8>, Error> {
|
||||
let dir_path = doc_root.join(format!("search.index/{dirname}"));
|
||||
stringdex_internals::write_data_to_disk(&mut column, &dir_path).map_err(|error| Error {
|
||||
file: dir_path,
|
||||
error: format!("failed to write column to disk: {error}"),
|
||||
})
|
||||
}
|
||||
fn perform_write_serde(
|
||||
doc_root: &Path,
|
||||
dirname: &str,
|
||||
column: Vec<Option<impl Serialize>>,
|
||||
) -> Result<Vec<u8>, Error> {
|
||||
perform_write_strings(
|
||||
doc_root,
|
||||
dirname,
|
||||
column.into_iter().map(|value| {
|
||||
if let Some(value) = value {
|
||||
serde_json::to_vec(&value).unwrap()
|
||||
} else {
|
||||
Vec::new()
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
fn perform_write_postings(
|
||||
doc_root: &Path,
|
||||
dirname: &str,
|
||||
column: Vec<Vec<Vec<u32>>>,
|
||||
) -> Result<Vec<u8>, Error> {
|
||||
perform_write_strings(
|
||||
doc_root,
|
||||
dirname,
|
||||
column.into_iter().map(|postings| {
|
||||
let mut buf = Vec::new();
|
||||
encode::write_postings_to_string(&postings, &mut buf);
|
||||
buf
|
||||
}),
|
||||
)
|
||||
}
|
||||
std::fs::write(
|
||||
doc_root.join(format!("search.index/root{resource_suffix}.js")),
|
||||
serialized_root,
|
||||
)
|
||||
.map_err(|error| Error {
|
||||
file: doc_root.join(format!("search.index/root{resource_suffix}.js")),
|
||||
error: format!("failed to write root to disk: {error}"),
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// One searchable item ("entry") in the serialized search index.
///
/// All `usize` fields are row indices into the index's parallel columns
/// (see how `sort` remaps them and `write_to` resolves `krate` through
/// `names`).
#[derive(Clone, Debug)]
struct EntryData {
    // row index of the crate this entry belongs to
    krate: usize,
    ty: ItemType,
    // row index of the containing module's path entry, if any
    module_path: Option<usize>,
    // row index of the canonical (re-export-free) module path, if different
    exact_module_path: Option<usize>,
    // row index of the parent item (e.g. the type an associated item hangs off)
    parent: Option<usize>,
    deprecated: bool,
    // extra disambiguator string, serialized only when present
    associated_item_disambiguator: Option<String>,
}
|
||||
|
||||
impl Serialize for EntryData {
    /// Serializes as a compact sequence:
    /// `[krate, ty, module_path, exact_module_path, parent, deprecated, disambiguator?]`.
    ///
    /// Optional row IDs are stored with a +1 offset so that `0` means `None`
    /// (the inverse transform lives in `SerializedOptional32`).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element(&self.krate)?;
        seq.serialize_element(&self.ty)?;
        // 0 = None, id + 1 otherwise
        seq.serialize_element(&self.module_path.map(|id| id + 1).unwrap_or(0))?;
        seq.serialize_element(&self.exact_module_path.map(|id| id + 1).unwrap_or(0))?;
        seq.serialize_element(&self.parent.map(|id| id + 1).unwrap_or(0))?;
        seq.serialize_element(&if self.deprecated { 1 } else { 0 })?;
        // trailing element is simply omitted when there is no disambiguator
        if let Some(disambig) = &self.associated_item_disambiguator {
            seq.serialize_element(&disambig)?;
        }
        seq.end()
    }
}
|
||||
|
||||
impl<'de> Deserialize<'de> for EntryData {
|
||||
fn deserialize<D>(deserializer: D) -> Result<EntryData, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct EntryDataVisitor;
|
||||
impl<'de> de::Visitor<'de> for EntryDataVisitor {
|
||||
type Value = EntryData;
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(formatter, "path data")
|
||||
}
|
||||
fn visit_seq<A: de::SeqAccess<'de>>(self, mut v: A) -> Result<EntryData, A::Error> {
|
||||
let krate: usize =
|
||||
v.next_element()?.ok_or_else(|| A::Error::missing_field("krate"))?;
|
||||
let ty: ItemType =
|
||||
v.next_element()?.ok_or_else(|| A::Error::missing_field("ty"))?;
|
||||
let module_path: SerializedOptional32 =
|
||||
v.next_element()?.ok_or_else(|| A::Error::missing_field("module_path"))?;
|
||||
let exact_module_path: SerializedOptional32 = v
|
||||
.next_element()?
|
||||
.ok_or_else(|| A::Error::missing_field("exact_module_path"))?;
|
||||
let parent: SerializedOptional32 =
|
||||
v.next_element()?.ok_or_else(|| A::Error::missing_field("parent"))?;
|
||||
let deprecated: u32 = v.next_element()?.unwrap_or(0);
|
||||
let associated_item_disambiguator: Option<String> = v.next_element()?;
|
||||
Ok(EntryData {
|
||||
krate,
|
||||
ty,
|
||||
module_path: Option::<i32>::from(module_path).map(|path| path as usize),
|
||||
exact_module_path: Option::<i32>::from(exact_module_path)
|
||||
.map(|path| path as usize),
|
||||
parent: Option::<i32>::from(parent).map(|path| path as usize),
|
||||
deprecated: deprecated != 0,
|
||||
associated_item_disambiguator,
|
||||
})
|
||||
}
|
||||
}
|
||||
deserializer.deserialize_any(EntryDataVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
/// Fully-qualified path information for an index row that can act as a
/// parent or module for other rows.
#[derive(Clone, Debug)]
struct PathData {
    ty: ItemType,
    // path segments of the containing module (joined with "::" on the wire)
    module_path: Vec<Symbol>,
    // canonical path when the display path goes through re-exports
    exact_module_path: Option<Vec<Symbol>>,
}
|
||||
|
||||
impl Serialize for PathData {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut seq = serializer.serialize_seq(None)?;
|
||||
seq.serialize_element(&self.ty)?;
|
||||
seq.serialize_element(&if self.module_path.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
join_path_syms(&self.module_path)
|
||||
})?;
|
||||
if let Some(ref path) = self.exact_module_path {
|
||||
seq.serialize_element(&if path.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
join_path_syms(path)
|
||||
})?;
|
||||
}
|
||||
seq.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for PathData {
    /// Decodes the `[ty, module_path, exact_module_path?]` sequence written
    /// by the matching `Serialize` impl, splitting `"::"`-joined paths back
    /// into interned symbols (the empty string decodes to no segments).
    fn deserialize<D>(deserializer: D) -> Result<PathData, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct PathDataVisitor;
        impl<'de> de::Visitor<'de> for PathDataVisitor {
            type Value = PathData;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(formatter, "path data")
            }
            fn visit_seq<A: de::SeqAccess<'de>>(self, mut v: A) -> Result<PathData, A::Error> {
                let ty: ItemType =
                    v.next_element()?.ok_or_else(|| A::Error::missing_field("ty"))?;
                let module_path: String =
                    v.next_element()?.ok_or_else(|| A::Error::missing_field("module_path"))?;
                // the element may be absent OR encoded as the sentinel 0
                let exact_module_path: Option<String> =
                    v.next_element()?.and_then(SerializedOptionalString::into);
                Ok(PathData {
                    ty,
                    module_path: if module_path.is_empty() {
                        vec![]
                    } else {
                        module_path.split("::").map(Symbol::intern).collect()
                    },
                    exact_module_path: exact_module_path.map(|path| {
                        if path.is_empty() {
                            vec![]
                        } else {
                            path.split("::").map(Symbol::intern).collect()
                        }
                    }),
                })
            }
        }
        deserializer.deserialize_any(PathDataVisitor)
    }
}
|
||||
|
||||
/// Per-type search metadata: unboxing behavior and the inverted index of
/// functions mentioning the type.
#[derive(Clone, Debug)]
struct TypeData {
    /// If set to "true", the generics can be matched without having to
    /// mention the type itself. The truth table, assuming `Unboxable`
    /// has `search_unbox = true` and `Inner` has `search_unbox = false`
    ///
    /// | **query**          | `Unboxable<Inner>` | `Inner` | `Inner<Unboxable>` |
    /// |--------------------|--------------------|---------|--------------------|
    /// | `Inner`            | yes                | yes     | yes                |
    /// | `Unboxable`        | yes                | no      | no                 |
    /// | `Unboxable<Inner>` | yes                | no      | no                 |
    /// | `Inner<Unboxable>` | no                 | no      | yes                |
    search_unbox: bool,
    /// List of functions that mention this type in their type signature.
    ///
    /// - The outermost list has one entry per alpha-normalized generic.
    ///
    /// - The second layer is sorted by number of types that appear in the
    ///   type signature. The search engine iterates over these in order from
    ///   smallest to largest. Functions with less stuff in their type
    ///   signature are more likely to be what the user wants, because we never
    ///   show functions that are *missing* parts of the query, so a smaller
    ///   signature means the query accounts for more of it.
    ///
    /// - The final layer is the list of functions.
    inverted_function_signature_index: Vec<Vec<u32>>,
}
|
||||
|
||||
impl Serialize for TypeData {
    /// Serializes as `null` when there is nothing to say, otherwise as a
    /// sequence `[base64_postings?, 1?]`: the postings element appears only
    /// when the inverted index is non-empty, and the trailing `1` only when
    /// `search_unbox` is set.
    ///
    /// NOTE(review): when `search_unbox` is true but the index is empty this
    /// emits `[1]`, whose first element the deserializer reads as a `String`
    /// — confirm that combination cannot occur, or that the decoder tolerates
    /// it.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if self.search_unbox || !self.inverted_function_signature_index.is_empty() {
            let mut seq = serializer.serialize_seq(None)?;
            if !self.inverted_function_signature_index.is_empty() {
                // postings encoding, then base64, so it fits in a JSON string
                let mut buf = Vec::new();
                encode::write_postings_to_string(&self.inverted_function_signature_index, &mut buf);
                let mut serialized_result = Vec::new();
                stringdex_internals::encode::write_base64_to_bytes(&buf, &mut serialized_result);
                seq.serialize_element(&String::from_utf8(serialized_result).unwrap())?;
            }
            if self.search_unbox {
                seq.serialize_element(&1)?;
            }
            seq.end()
        } else {
            // all-default TypeData collapses to null
            None::<()>.serialize(serializer)
        }
    }
}
|
||||
|
||||
impl<'de> Deserialize<'de> for TypeData {
    /// Decodes the `null` / `[base64_postings?, 1?]` forms produced by the
    /// matching `Serialize` impl.
    fn deserialize<D>(deserializer: D) -> Result<TypeData, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct TypeDataVisitor;
        impl<'de> de::Visitor<'de> for TypeDataVisitor {
            type Value = TypeData;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(formatter, "type data")
            }
            // `null` round-trips back to the all-default value
            fn visit_none<E>(self) -> Result<TypeData, E> {
                Ok(TypeData { inverted_function_signature_index: vec![], search_unbox: false })
            }
            fn visit_seq<A: de::SeqAccess<'de>>(self, mut v: A) -> Result<TypeData, A::Error> {
                // missing postings element decodes as the empty index
                let inverted_function_signature_index: String =
                    v.next_element()?.unwrap_or(String::new());
                let search_unbox: u32 = v.next_element()?.unwrap_or(0);
                // undo the base64 layer, then the postings encoding
                let mut idx: Vec<u8> = Vec::new();
                stringdex_internals::decode::read_base64_from_bytes(
                    inverted_function_signature_index.as_bytes(),
                    &mut idx,
                )
                .unwrap();
                let mut inverted_function_signature_index = Vec::new();
                encode::read_postings_from_string(&mut inverted_function_signature_index, &idx);
                Ok(TypeData { inverted_function_signature_index, search_unbox: search_unbox == 1 })
            }
        }
        deserializer.deserialize_any(TypeDataVisitor)
    }
}
|
||||
|
||||
/// Wire-format wrapper for an optional string: `None` is encoded as the
/// number `0`, `Some` as the string itself (see the Serialize/Deserialize
/// impls below).
enum SerializedOptionalString {
    None,
    Some(String),
}
|
||||
|
||||
impl From<SerializedOptionalString> for Option<String> {
|
||||
fn from(me: SerializedOptionalString) -> Option<String> {
|
||||
match me {
|
||||
SerializedOptionalString::Some(string) => Some(string),
|
||||
SerializedOptionalString::None => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for SerializedOptionalString {
    /// `Some(s)` serializes as the string, `None` as the number `0` — this
    /// keeps absent values one byte wide in the JSON output.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            SerializedOptionalString::Some(string) => string.serialize(serializer),
            SerializedOptionalString::None => 0.serialize(serializer),
        }
    }
}
|
||||
impl<'de> Deserialize<'de> for SerializedOptionalString {
|
||||
fn deserialize<D>(deserializer: D) -> Result<SerializedOptionalString, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct SerializedOptionalStringVisitor;
|
||||
impl<'de> de::Visitor<'de> for SerializedOptionalStringVisitor {
|
||||
type Value = SerializedOptionalString;
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(formatter, "0 or string")
|
||||
}
|
||||
fn visit_u64<E: de::Error>(self, v: u64) -> Result<SerializedOptionalString, E> {
|
||||
if v != 0 {
|
||||
return Err(E::missing_field("not 0"));
|
||||
}
|
||||
Ok(SerializedOptionalString::None)
|
||||
}
|
||||
fn visit_string<E: de::Error>(self, v: String) -> Result<SerializedOptionalString, E> {
|
||||
Ok(SerializedOptionalString::Some(v))
|
||||
}
|
||||
fn visit_str<E: de::Error>(self, v: &str) -> Result<SerializedOptionalString, E> {
|
||||
Ok(SerializedOptionalString::Some(v.to_string()))
|
||||
}
|
||||
}
|
||||
deserializer.deserialize_any(SerializedOptionalStringVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
/// Wire-format wrapper for an optional signed index: `None` is encoded as
/// `0`, non-negative values as `n + 1`, and negative values as themselves
/// (see the Serialize/Deserialize impls below).
enum SerializedOptional32 {
    None,
    Some(i32),
}
|
||||
|
||||
impl From<SerializedOptional32> for Option<i32> {
|
||||
fn from(me: SerializedOptional32) -> Option<i32> {
|
||||
match me {
|
||||
SerializedOptional32::Some(number) => Some(number),
|
||||
SerializedOptional32::None => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for SerializedOptional32 {
    /// Encoding: negative values pass through unchanged, non-negative values
    /// are shifted up by one, and `None` becomes `0` — so `0` unambiguously
    /// means "absent" on the wire.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            &SerializedOptional32::Some(number) if number < 0 => number.serialize(serializer),
            &SerializedOptional32::Some(number) => (number + 1).serialize(serializer),
            &SerializedOptional32::None => 0.serialize(serializer),
        }
    }
}
|
||||
impl<'de> Deserialize<'de> for SerializedOptional32 {
    /// Inverse of the `Serialize` impl: `0` → `None`, negatives pass through,
    /// and positive values are shifted back down by one.
    fn deserialize<D>(deserializer: D) -> Result<SerializedOptional32, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct SerializedOptional32Visitor;
        impl<'de> de::Visitor<'de> for SerializedOptional32Visitor {
            type Value = SerializedOptional32;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(formatter, "integer")
            }
            fn visit_i64<E: de::Error>(self, v: i64) -> Result<SerializedOptional32, E> {
                Ok(match v {
                    0 => SerializedOptional32::None,
                    v if v < 0 => SerializedOptional32::Some(v as i32),
                    v => SerializedOptional32::Some(v as i32 - 1),
                })
            }
            // NOTE(review): `v as i32` truncates for values above i32::MAX —
            // presumably the writer never emits such values; confirm.
            fn visit_u64<E: de::Error>(self, v: u64) -> Result<SerializedOptional32, E> {
                Ok(match v {
                    0 => SerializedOptional32::None,
                    v => SerializedOptional32::Some(v as i32 - 1),
                })
            }
        }
        deserializer.deserialize_any(SerializedOptional32Visitor)
    }
}
|
||||
|
||||
/// Search metadata for a function-like item: its encoded type signature and
/// the names of its parameters.
#[derive(Clone, Debug)]
pub struct FunctionData {
    // compact string encoding produced by
    // `IndexItemFunctionType::write_to_string_without_param_names`
    function_signature: String,
    param_names: Vec<String>,
}
|
||||
|
||||
impl Serialize for FunctionData {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut seq = serializer.serialize_seq(None)?;
|
||||
seq.serialize_element(&self.function_signature)?;
|
||||
seq.serialize_element(&self.param_names)?;
|
||||
seq.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for FunctionData {
    /// Decodes the `[function_signature, param_names]` sequence written by
    /// the matching `Serialize` impl; both elements are required.
    fn deserialize<D>(deserializer: D) -> Result<FunctionData, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct FunctionDataVisitor;
        impl<'de> de::Visitor<'de> for FunctionDataVisitor {
            type Value = FunctionData;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(formatter, "fn data")
            }
            fn visit_seq<A: de::SeqAccess<'de>>(self, mut v: A) -> Result<FunctionData, A::Error> {
                let function_signature: String = v
                    .next_element()?
                    .ok_or_else(|| A::Error::missing_field("function_signature"))?;
                let param_names: Vec<String> =
                    v.next_element()?.ok_or_else(|| A::Error::missing_field("param_names"))?;
                Ok(FunctionData { function_signature, param_names })
            }
        }
        deserializer.deserialize_any(FunctionDataVisitor)
    }
}
|
||||
|
||||
/// Builds the search index from the collected metadata
|
||||
pub(crate) fn build_index(
|
||||
krate: &clean::Crate,
|
||||
cache: &mut Cache,
|
||||
tcx: TyCtxt<'_>,
|
||||
) -> SerializedSearchIndex {
|
||||
// Maps from ID to position in the `crate_paths` array.
|
||||
let mut itemid_to_pathid = FxHashMap::default();
|
||||
let mut primitives = FxHashMap::default();
|
||||
let mut associated_types = FxHashMap::default();
|
||||
|
||||
// item type, display path, re-exported internal path
|
||||
let mut crate_paths: Vec<(ItemType, Vec<Symbol>, Option<Vec<Symbol>>, bool)> = vec![];
|
||||
doc_root: &Path,
|
||||
resource_suffix: &str,
|
||||
) -> Result<SerializedSearchIndex, Error> {
|
||||
let mut search_index = std::mem::take(&mut cache.search_index);
|
||||
|
||||
// Attach all orphan items to the type's definition if the type
|
||||
// has since been learned.
|
||||
@@ -74,15 +1171,15 @@ pub(crate) fn build_index(
|
||||
{
|
||||
if let Some((fqp, _)) = cache.paths.get(&parent) {
|
||||
let desc = short_markdown_summary(&item.doc_value(), &item.link_names(cache));
|
||||
cache.search_index.push(IndexItem {
|
||||
search_index.push(IndexItem {
|
||||
ty: item.type_(),
|
||||
defid: item.item_id.as_def_id(),
|
||||
name: item.name.unwrap(),
|
||||
path: join_path_syms(&fqp[..fqp.len() - 1]),
|
||||
module_path: fqp[..fqp.len() - 1].to_vec(),
|
||||
desc,
|
||||
parent: Some(parent),
|
||||
parent_idx: None,
|
||||
exact_path: None,
|
||||
exact_module_path: None,
|
||||
impl_id,
|
||||
search_type: get_function_type_for_search(
|
||||
item,
|
||||
@@ -97,85 +1194,299 @@ pub(crate) fn build_index(
|
||||
}
|
||||
}
|
||||
|
||||
let crate_doc =
|
||||
short_markdown_summary(&krate.module.doc_value(), &krate.module.link_names(cache));
|
||||
|
||||
#[derive(Eq, Ord, PartialEq, PartialOrd)]
|
||||
struct SerSymbolAsStr(Symbol);
|
||||
|
||||
impl Serialize for SerSymbolAsStr {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
self.0.as_str().serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
type AliasMap = BTreeMap<SerSymbolAsStr, Vec<usize>>;
|
||||
// Aliases added through `#[doc(alias = "...")]`. Since a few items can have the same alias,
|
||||
// we need the alias element to have an array of items.
|
||||
let mut aliases: AliasMap = BTreeMap::new();
|
||||
|
||||
// Sort search index items. This improves the compressibility of the search index.
|
||||
cache.search_index.sort_unstable_by(|k1, k2| {
|
||||
search_index.sort_unstable_by(|k1, k2| {
|
||||
// `sort_unstable_by_key` produces lifetime errors
|
||||
// HACK(rustdoc): should not be sorting `CrateNum` or `DefIndex`, this will soon go away, too
|
||||
let k1 = (&k1.path, k1.name.as_str(), &k1.ty, k1.parent.map(|id| (id.index, id.krate)));
|
||||
let k2 = (&k2.path, k2.name.as_str(), &k2.ty, k2.parent.map(|id| (id.index, id.krate)));
|
||||
let k1 =
|
||||
(&k1.module_path, k1.name.as_str(), &k1.ty, k1.parent.map(|id| (id.index, id.krate)));
|
||||
let k2 =
|
||||
(&k2.module_path, k2.name.as_str(), &k2.ty, k2.parent.map(|id| (id.index, id.krate)));
|
||||
Ord::cmp(&k1, &k2)
|
||||
});
|
||||
|
||||
// Set up alias indexes.
|
||||
for (i, item) in cache.search_index.iter().enumerate() {
|
||||
for alias in &item.aliases[..] {
|
||||
aliases.entry(SerSymbolAsStr(*alias)).or_default().push(i);
|
||||
// Now, convert to an on-disk search index format
|
||||
//
|
||||
// if there's already a search index, load it into memory and add the new entries to it
|
||||
// otherwise, do nothing
|
||||
let mut serialized_index = SerializedSearchIndex::load(doc_root, resource_suffix)?;
|
||||
|
||||
// The crate always goes first in this list
|
||||
let crate_name = krate.name(tcx);
|
||||
let crate_doc =
|
||||
short_markdown_summary(&krate.module.doc_value(), &krate.module.link_names(cache));
|
||||
let crate_idx = {
|
||||
let crate_path = (ItemType::ExternCrate, vec![crate_name]);
|
||||
match serialized_index.crate_paths_index.entry(crate_path) {
|
||||
Entry::Occupied(index) => {
|
||||
let index = *index.get();
|
||||
serialized_index.descs[index] = crate_doc;
|
||||
for type_data in serialized_index.type_data.iter_mut() {
|
||||
if let Some(TypeData { inverted_function_signature_index, .. }) = type_data {
|
||||
for list in &mut inverted_function_signature_index[..] {
|
||||
list.retain(|fnid| {
|
||||
serialized_index.entry_data[usize::try_from(*fnid).unwrap()]
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.krate
|
||||
!= index
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
for i in (index + 1)..serialized_index.entry_data.len() {
|
||||
// if this crate has been built before, replace its stuff with new
|
||||
if let Some(EntryData { krate, .. }) = serialized_index.entry_data[i]
|
||||
&& krate == index
|
||||
{
|
||||
serialized_index.entry_data[i] = None;
|
||||
serialized_index.descs[i] = String::new();
|
||||
serialized_index.function_data[i] = None;
|
||||
if serialized_index.path_data[i].is_none() {
|
||||
serialized_index.names[i] = String::new();
|
||||
}
|
||||
}
|
||||
if let Some(alias_pointer) = serialized_index.alias_pointers[i]
|
||||
&& serialized_index.entry_data[alias_pointer].is_none()
|
||||
{
|
||||
serialized_index.alias_pointers[i] = None;
|
||||
if serialized_index.path_data[i].is_none()
|
||||
&& serialized_index.entry_data[i].is_none()
|
||||
{
|
||||
serialized_index.names[i] = String::new();
|
||||
}
|
||||
}
|
||||
}
|
||||
index
|
||||
}
|
||||
Entry::Vacant(slot) => {
|
||||
let krate = serialized_index.names.len();
|
||||
slot.insert(krate);
|
||||
serialized_index.push(
|
||||
crate_name.as_str().to_string(),
|
||||
Some(PathData {
|
||||
ty: ItemType::ExternCrate,
|
||||
module_path: vec![],
|
||||
exact_module_path: None,
|
||||
}),
|
||||
Some(EntryData {
|
||||
krate,
|
||||
ty: ItemType::ExternCrate,
|
||||
module_path: None,
|
||||
exact_module_path: None,
|
||||
parent: None,
|
||||
deprecated: false,
|
||||
associated_item_disambiguator: None,
|
||||
}),
|
||||
crate_doc,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
);
|
||||
krate
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// First, populate associated item parents
|
||||
let crate_items: Vec<&mut IndexItem> = search_index
|
||||
.iter_mut()
|
||||
.map(|item| {
|
||||
item.parent_idx = item.parent.and_then(|defid| {
|
||||
cache.paths.get(&defid).map(|&(ref fqp, ty)| {
|
||||
let pathid = serialized_index.names.len();
|
||||
match serialized_index.crate_paths_index.entry((ty, fqp.clone())) {
|
||||
Entry::Occupied(entry) => *entry.get(),
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(pathid);
|
||||
let (name, path) = fqp.split_last().unwrap();
|
||||
serialized_index.push_path(
|
||||
name.as_str().to_string(),
|
||||
PathData {
|
||||
ty,
|
||||
module_path: path.to_vec(),
|
||||
exact_module_path: if let Some(exact_path) =
|
||||
cache.exact_paths.get(&defid)
|
||||
&& let Some((name2, exact_path)) = exact_path.split_last()
|
||||
&& name == name2
|
||||
{
|
||||
Some(exact_path.to_vec())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
},
|
||||
);
|
||||
usize::try_from(pathid).unwrap()
|
||||
}
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
if let Some(defid) = item.defid
|
||||
&& item.parent_idx.is_none()
|
||||
{
|
||||
// If this is a re-export, retain the original path.
|
||||
// Associated items don't use this.
|
||||
// Their parent carries the exact fqp instead.
|
||||
let exact_fqp = cache
|
||||
.exact_paths
|
||||
.get(&defid)
|
||||
.or_else(|| cache.external_paths.get(&defid).map(|(fqp, _)| fqp));
|
||||
item.exact_module_path = exact_fqp.and_then(|fqp| {
|
||||
// Re-exports only count if the name is exactly the same.
|
||||
// This is a size optimization, since it means we only need
|
||||
// to store the name once (and the path is re-used for everything
|
||||
// exported from this same module). It's also likely to Do
|
||||
// What I Mean, since if a re-export changes the name, it might
|
||||
// also be a change in semantic meaning.
|
||||
if fqp.last() != Some(&item.name) {
|
||||
return None;
|
||||
}
|
||||
let path =
|
||||
if item.ty == ItemType::Macro && tcx.has_attr(defid, sym::macro_export) {
|
||||
// `#[macro_export]` always exports to the crate root.
|
||||
vec![tcx.crate_name(defid.krate)]
|
||||
} else {
|
||||
if fqp.len() < 2 {
|
||||
return None;
|
||||
}
|
||||
fqp[..fqp.len() - 1].to_vec()
|
||||
};
|
||||
if path == item.module_path {
|
||||
return None;
|
||||
}
|
||||
Some(path)
|
||||
});
|
||||
} else if let Some(parent_idx) = item.parent_idx {
|
||||
let i = usize::try_from(parent_idx).unwrap();
|
||||
item.module_path =
|
||||
serialized_index.path_data[i].as_ref().unwrap().module_path.clone();
|
||||
item.exact_module_path =
|
||||
serialized_index.path_data[i].as_ref().unwrap().exact_module_path.clone();
|
||||
}
|
||||
|
||||
&mut *item
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Now, find anywhere that the same name is used for two different items
|
||||
// these need a disambiguator hash for lints
|
||||
let mut associated_item_duplicates = FxHashMap::<(usize, ItemType, Symbol), usize>::default();
|
||||
for item in crate_items.iter().map(|x| &*x) {
|
||||
if item.impl_id.is_some()
|
||||
&& let Some(parent_idx) = item.parent_idx
|
||||
{
|
||||
let count =
|
||||
associated_item_duplicates.entry((parent_idx, item.ty, item.name)).or_insert(0);
|
||||
*count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Reduce `DefId` in paths into smaller sequential numbers,
|
||||
// and prune the paths that do not appear in the index.
|
||||
let mut lastpath = "";
|
||||
let mut lastpathid = 0isize;
|
||||
// now populate the actual entries, type data, and function data
|
||||
for item in crate_items {
|
||||
assert_eq!(
|
||||
item.parent.is_some(),
|
||||
item.parent_idx.is_some(),
|
||||
"`{}` is missing idx",
|
||||
item.name
|
||||
);
|
||||
|
||||
// First, on function signatures
|
||||
let mut search_index = std::mem::take(&mut cache.search_index);
|
||||
for item in search_index.iter_mut() {
|
||||
fn insert_into_map<F: std::hash::Hash + Eq>(
|
||||
map: &mut FxHashMap<F, isize>,
|
||||
itemid: F,
|
||||
lastpathid: &mut isize,
|
||||
crate_paths: &mut Vec<(ItemType, Vec<Symbol>, Option<Vec<Symbol>>, bool)>,
|
||||
item_type: ItemType,
|
||||
let module_path = Some(serialized_index.get_id_by_module_path(&item.module_path));
|
||||
let exact_module_path = item
|
||||
.exact_module_path
|
||||
.as_ref()
|
||||
.map(|path| serialized_index.get_id_by_module_path(path));
|
||||
|
||||
let new_entry_id = serialized_index.push(
|
||||
item.name.as_str().to_string(),
|
||||
None,
|
||||
Some(EntryData {
|
||||
ty: item.ty,
|
||||
parent: item.parent_idx,
|
||||
module_path,
|
||||
exact_module_path,
|
||||
deprecated: item.deprecation.is_some(),
|
||||
associated_item_disambiguator: if let Some(impl_id) = item.impl_id
|
||||
&& let Some(parent_idx) = item.parent_idx
|
||||
&& associated_item_duplicates
|
||||
.get(&(parent_idx, item.ty, item.name))
|
||||
.copied()
|
||||
.unwrap_or(0)
|
||||
> 1
|
||||
{
|
||||
Some(render::get_id_for_impl(tcx, ItemId::DefId(impl_id)))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
krate: crate_idx,
|
||||
}),
|
||||
item.desc.to_string(),
|
||||
None, // filled in after all the types have been indexed
|
||||
None,
|
||||
None,
|
||||
);
|
||||
|
||||
// Aliases
|
||||
// -------
|
||||
for alias in &item.aliases[..] {
|
||||
serialized_index.push_alias(alias.as_str().to_string(), new_entry_id);
|
||||
}
|
||||
|
||||
// Function signature reverse index
|
||||
// --------------------------------
|
||||
fn insert_into_map(
|
||||
ty: ItemType,
|
||||
path: &[Symbol],
|
||||
exact_path: Option<&[Symbol]>,
|
||||
search_unbox: bool,
|
||||
serialized_index: &mut SerializedSearchIndex,
|
||||
used_in_function_signature: &mut BTreeSet<isize>,
|
||||
) -> RenderTypeId {
|
||||
match map.entry(itemid) {
|
||||
Entry::Occupied(entry) => RenderTypeId::Index(*entry.get()),
|
||||
Entry::Vacant(entry) => {
|
||||
let pathid = *lastpathid;
|
||||
entry.insert(pathid);
|
||||
*lastpathid += 1;
|
||||
crate_paths.push((
|
||||
item_type,
|
||||
path.to_vec(),
|
||||
exact_path.map(|path| path.to_vec()),
|
||||
search_unbox,
|
||||
));
|
||||
RenderTypeId::Index(pathid)
|
||||
let pathid = serialized_index.names.len();
|
||||
let pathid = match serialized_index.crate_paths_index.entry((ty, path.to_vec())) {
|
||||
Entry::Occupied(entry) => {
|
||||
let id = *entry.get();
|
||||
if serialized_index.type_data[id].as_mut().is_none() {
|
||||
serialized_index.type_data[id] = Some(TypeData {
|
||||
search_unbox,
|
||||
inverted_function_signature_index: Vec::new(),
|
||||
});
|
||||
} else if search_unbox {
|
||||
serialized_index.type_data[id].as_mut().unwrap().search_unbox = true;
|
||||
}
|
||||
id
|
||||
}
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(pathid);
|
||||
let (name, path) = path.split_last().unwrap();
|
||||
serialized_index.push_type(
|
||||
name.to_string(),
|
||||
PathData {
|
||||
ty,
|
||||
module_path: path.to_vec(),
|
||||
exact_module_path: if let Some(exact_path) = exact_path
|
||||
&& let Some((name2, exact_path)) = exact_path.split_last()
|
||||
&& name == name2
|
||||
{
|
||||
Some(exact_path.to_vec())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
},
|
||||
TypeData { search_unbox, inverted_function_signature_index: Vec::new() },
|
||||
);
|
||||
pathid
|
||||
}
|
||||
};
|
||||
used_in_function_signature.insert(isize::try_from(pathid).unwrap());
|
||||
RenderTypeId::Index(isize::try_from(pathid).unwrap())
|
||||
}
|
||||
|
||||
fn convert_render_type_id(
|
||||
id: RenderTypeId,
|
||||
cache: &mut Cache,
|
||||
itemid_to_pathid: &mut FxHashMap<ItemId, isize>,
|
||||
primitives: &mut FxHashMap<Symbol, isize>,
|
||||
associated_types: &mut FxHashMap<Symbol, isize>,
|
||||
lastpathid: &mut isize,
|
||||
crate_paths: &mut Vec<(ItemType, Vec<Symbol>, Option<Vec<Symbol>>, bool)>,
|
||||
serialized_index: &mut SerializedSearchIndex,
|
||||
used_in_function_signature: &mut BTreeSet<isize>,
|
||||
tcx: TyCtxt<'_>,
|
||||
) -> Option<RenderTypeId> {
|
||||
use crate::clean::PrimitiveType;
|
||||
@@ -192,39 +1503,55 @@ fn convert_render_type_id(
|
||||
};
|
||||
match id {
|
||||
RenderTypeId::Mut => Some(insert_into_map(
|
||||
primitives,
|
||||
kw::Mut,
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
ItemType::Keyword,
|
||||
&[kw::Mut],
|
||||
None,
|
||||
search_unbox,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
)),
|
||||
RenderTypeId::DefId(defid) => {
|
||||
if let Some(&(ref fqp, item_type)) =
|
||||
paths.get(&defid).or_else(|| external_paths.get(&defid))
|
||||
{
|
||||
let exact_fqp = exact_paths
|
||||
.get(&defid)
|
||||
.or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp))
|
||||
// Re-exports only count if the name is exactly the same.
|
||||
// This is a size optimization, since it means we only need
|
||||
// to store the name once (and the path is re-used for everything
|
||||
// exported from this same module). It's also likely to Do
|
||||
// What I Mean, since if a re-export changes the name, it might
|
||||
// also be a change in semantic meaning.
|
||||
.filter(|this_fqp| this_fqp.last() == fqp.last());
|
||||
Some(insert_into_map(
|
||||
itemid_to_pathid,
|
||||
ItemId::DefId(defid),
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
item_type,
|
||||
fqp,
|
||||
exact_fqp.map(|x| &x[..]).filter(|exact_fqp| exact_fqp != fqp),
|
||||
search_unbox,
|
||||
))
|
||||
if tcx.lang_items().fn_mut_trait() == Some(defid)
|
||||
|| tcx.lang_items().fn_once_trait() == Some(defid)
|
||||
|| tcx.lang_items().fn_trait() == Some(defid)
|
||||
{
|
||||
let name = *fqp.last().unwrap();
|
||||
// Make absolutely sure we use this single, correct path,
|
||||
// because search.js needs to match. If we don't do this,
|
||||
// there are three different paths that these traits may
|
||||
// appear to come from.
|
||||
Some(insert_into_map(
|
||||
item_type,
|
||||
&[sym::core, sym::ops, name],
|
||||
Some(&[sym::core, sym::ops, name]),
|
||||
search_unbox,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
))
|
||||
} else {
|
||||
let exact_fqp = exact_paths
|
||||
.get(&defid)
|
||||
.or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp))
|
||||
.map(|v| &v[..])
|
||||
// Re-exports only count if the name is exactly the same.
|
||||
// This is a size optimization, since it means we only need
|
||||
// to store the name once (and the path is re-used for everything
|
||||
// exported from this same module). It's also likely to Do
|
||||
// What I Mean, since if a re-export changes the name, it might
|
||||
// also be a change in semantic meaning.
|
||||
.filter(|this_fqp| this_fqp.last() == fqp.last());
|
||||
Some(insert_into_map(
|
||||
item_type,
|
||||
fqp,
|
||||
exact_fqp,
|
||||
search_unbox,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
))
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -232,26 +1559,25 @@ fn convert_render_type_id(
|
||||
RenderTypeId::Primitive(primitive) => {
|
||||
let sym = primitive.as_sym();
|
||||
Some(insert_into_map(
|
||||
primitives,
|
||||
sym,
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
ItemType::Primitive,
|
||||
&[sym],
|
||||
None,
|
||||
search_unbox,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
))
|
||||
}
|
||||
RenderTypeId::Index(_) => Some(id),
|
||||
RenderTypeId::Index(index) => {
|
||||
used_in_function_signature.insert(index);
|
||||
Some(id)
|
||||
}
|
||||
RenderTypeId::AssociatedType(sym) => Some(insert_into_map(
|
||||
associated_types,
|
||||
sym,
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
ItemType::AssocType,
|
||||
&[sym],
|
||||
None,
|
||||
search_unbox,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
)),
|
||||
}
|
||||
}
|
||||
@@ -259,11 +1585,8 @@ fn convert_render_type_id(
|
||||
fn convert_render_type(
|
||||
ty: &mut RenderType,
|
||||
cache: &mut Cache,
|
||||
itemid_to_pathid: &mut FxHashMap<ItemId, isize>,
|
||||
primitives: &mut FxHashMap<Symbol, isize>,
|
||||
associated_types: &mut FxHashMap<Symbol, isize>,
|
||||
lastpathid: &mut isize,
|
||||
crate_paths: &mut Vec<(ItemType, Vec<Symbol>, Option<Vec<Symbol>>, bool)>,
|
||||
serialized_index: &mut SerializedSearchIndex,
|
||||
used_in_function_signature: &mut BTreeSet<isize>,
|
||||
tcx: TyCtxt<'_>,
|
||||
) {
|
||||
if let Some(generics) = &mut ty.generics {
|
||||
@@ -271,11 +1594,8 @@ fn convert_render_type(
|
||||
convert_render_type(
|
||||
item,
|
||||
cache,
|
||||
itemid_to_pathid,
|
||||
primitives,
|
||||
associated_types,
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
tcx,
|
||||
);
|
||||
}
|
||||
@@ -285,11 +1605,8 @@ fn convert_render_type(
|
||||
let converted_associated_type = convert_render_type_id(
|
||||
*associated_type,
|
||||
cache,
|
||||
itemid_to_pathid,
|
||||
primitives,
|
||||
associated_types,
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
tcx,
|
||||
);
|
||||
let Some(converted_associated_type) = converted_associated_type else {
|
||||
@@ -300,11 +1617,8 @@ fn convert_render_type(
|
||||
convert_render_type(
|
||||
constraint,
|
||||
cache,
|
||||
itemid_to_pathid,
|
||||
primitives,
|
||||
associated_types,
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
tcx,
|
||||
);
|
||||
}
|
||||
@@ -318,24 +1632,74 @@ fn convert_render_type(
|
||||
ty.id = convert_render_type_id(
|
||||
id,
|
||||
cache,
|
||||
itemid_to_pathid,
|
||||
primitives,
|
||||
associated_types,
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
tcx,
|
||||
);
|
||||
use crate::clean::PrimitiveType;
|
||||
// These cases are added to the inverted index, but not actually included
|
||||
// in the signature. There's a matching set of cases in the
|
||||
// `unifyFunctionTypeIsMatchCandidate` function, for the slow path.
|
||||
match id {
|
||||
// typeNameIdOfArrayOrSlice
|
||||
RenderTypeId::Primitive(PrimitiveType::Array | PrimitiveType::Slice) => {
|
||||
insert_into_map(
|
||||
ItemType::Primitive,
|
||||
&[Symbol::intern("[]")],
|
||||
None,
|
||||
false,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
);
|
||||
}
|
||||
RenderTypeId::Primitive(PrimitiveType::Tuple | PrimitiveType::Unit) => {
|
||||
// typeNameIdOfArrayOrSlice
|
||||
insert_into_map(
|
||||
ItemType::Primitive,
|
||||
&[Symbol::intern("()")],
|
||||
None,
|
||||
false,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
);
|
||||
}
|
||||
// typeNameIdOfHof
|
||||
RenderTypeId::Primitive(PrimitiveType::Fn) => {
|
||||
insert_into_map(
|
||||
ItemType::Primitive,
|
||||
&[Symbol::intern("->")],
|
||||
None,
|
||||
false,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
);
|
||||
}
|
||||
RenderTypeId::DefId(did)
|
||||
if tcx.lang_items().fn_mut_trait() == Some(did)
|
||||
|| tcx.lang_items().fn_once_trait() == Some(did)
|
||||
|| tcx.lang_items().fn_trait() == Some(did) =>
|
||||
{
|
||||
insert_into_map(
|
||||
ItemType::Primitive,
|
||||
&[Symbol::intern("->")],
|
||||
None,
|
||||
false,
|
||||
serialized_index,
|
||||
used_in_function_signature,
|
||||
);
|
||||
}
|
||||
// not special
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
if let Some(search_type) = &mut item.search_type {
|
||||
let mut used_in_function_signature = BTreeSet::new();
|
||||
for item in &mut search_type.inputs {
|
||||
convert_render_type(
|
||||
item,
|
||||
cache,
|
||||
&mut itemid_to_pathid,
|
||||
&mut primitives,
|
||||
&mut associated_types,
|
||||
&mut lastpathid,
|
||||
&mut crate_paths,
|
||||
&mut serialized_index,
|
||||
&mut used_in_function_signature,
|
||||
tcx,
|
||||
);
|
||||
}
|
||||
@@ -343,11 +1707,8 @@ fn convert_render_type(
|
||||
convert_render_type(
|
||||
item,
|
||||
cache,
|
||||
&mut itemid_to_pathid,
|
||||
&mut primitives,
|
||||
&mut associated_types,
|
||||
&mut lastpathid,
|
||||
&mut crate_paths,
|
||||
&mut serialized_index,
|
||||
&mut used_in_function_signature,
|
||||
tcx,
|
||||
);
|
||||
}
|
||||
@@ -356,464 +1717,56 @@ fn convert_render_type(
|
||||
convert_render_type(
|
||||
trait_,
|
||||
cache,
|
||||
&mut itemid_to_pathid,
|
||||
&mut primitives,
|
||||
&mut associated_types,
|
||||
&mut lastpathid,
|
||||
&mut crate_paths,
|
||||
&mut serialized_index,
|
||||
&mut used_in_function_signature,
|
||||
tcx,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let Cache { ref paths, ref exact_paths, ref external_paths, .. } = *cache;
|
||||
|
||||
// Then, on parent modules
|
||||
let crate_items: Vec<&IndexItem> = search_index
|
||||
.iter_mut()
|
||||
.map(|item| {
|
||||
item.parent_idx =
|
||||
item.parent.and_then(|defid| match itemid_to_pathid.entry(ItemId::DefId(defid)) {
|
||||
Entry::Occupied(entry) => Some(*entry.get()),
|
||||
Entry::Vacant(entry) => {
|
||||
let pathid = lastpathid;
|
||||
entry.insert(pathid);
|
||||
lastpathid += 1;
|
||||
|
||||
if let Some(&(ref fqp, short)) = paths.get(&defid) {
|
||||
let exact_fqp = exact_paths
|
||||
.get(&defid)
|
||||
.or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp))
|
||||
.filter(|exact_fqp| {
|
||||
exact_fqp.last() == Some(&item.name) && *exact_fqp != fqp
|
||||
});
|
||||
crate_paths.push((
|
||||
short,
|
||||
fqp.clone(),
|
||||
exact_fqp.cloned(),
|
||||
utils::has_doc_flag(tcx, defid, sym::search_unbox),
|
||||
));
|
||||
Some(pathid)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(defid) = item.defid
|
||||
&& item.parent_idx.is_none()
|
||||
{
|
||||
// If this is a re-export, retain the original path.
|
||||
// Associated items don't use this.
|
||||
// Their parent carries the exact fqp instead.
|
||||
let exact_fqp = exact_paths
|
||||
.get(&defid)
|
||||
.or_else(|| external_paths.get(&defid).map(|(fqp, _)| fqp));
|
||||
item.exact_path = exact_fqp.and_then(|fqp| {
|
||||
// Re-exports only count if the name is exactly the same.
|
||||
// This is a size optimization, since it means we only need
|
||||
// to store the name once (and the path is re-used for everything
|
||||
// exported from this same module). It's also likely to Do
|
||||
// What I Mean, since if a re-export changes the name, it might
|
||||
// also be a change in semantic meaning.
|
||||
if fqp.last() != Some(&item.name) {
|
||||
return None;
|
||||
}
|
||||
let path =
|
||||
if item.ty == ItemType::Macro && tcx.has_attr(defid, sym::macro_export) {
|
||||
// `#[macro_export]` always exports to the crate root.
|
||||
tcx.crate_name(defid.krate).to_string()
|
||||
} else {
|
||||
if fqp.len() < 2 {
|
||||
return None;
|
||||
}
|
||||
join_path_syms(&fqp[..fqp.len() - 1])
|
||||
};
|
||||
if path == item.path {
|
||||
return None;
|
||||
}
|
||||
Some(path)
|
||||
});
|
||||
} else if let Some(parent_idx) = item.parent_idx {
|
||||
let i = <isize as TryInto<usize>>::try_into(parent_idx).unwrap();
|
||||
item.path = {
|
||||
let p = &crate_paths[i].1;
|
||||
join_path_syms(&p[..p.len() - 1])
|
||||
};
|
||||
item.exact_path =
|
||||
crate_paths[i].2.as_ref().map(|xp| join_path_syms(&xp[..xp.len() - 1]));
|
||||
}
|
||||
|
||||
// Omit the parent path if it is same to that of the prior item.
|
||||
if lastpath == item.path {
|
||||
item.path.clear();
|
||||
} else {
|
||||
lastpath = &item.path;
|
||||
}
|
||||
|
||||
&*item
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Find associated items that need disambiguators
|
||||
let mut associated_item_duplicates = FxHashMap::<(isize, ItemType, Symbol), usize>::default();
|
||||
|
||||
for &item in &crate_items {
|
||||
if item.impl_id.is_some()
|
||||
&& let Some(parent_idx) = item.parent_idx
|
||||
{
|
||||
let count =
|
||||
associated_item_duplicates.entry((parent_idx, item.ty, item.name)).or_insert(0);
|
||||
*count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
let associated_item_disambiguators = crate_items
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(index, item)| {
|
||||
let impl_id = ItemId::DefId(item.impl_id?);
|
||||
let parent_idx = item.parent_idx?;
|
||||
let count = *associated_item_duplicates.get(&(parent_idx, item.ty, item.name))?;
|
||||
if count > 1 { Some((index, render::get_id_for_impl(tcx, impl_id))) } else { None }
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
struct CrateData<'a> {
|
||||
items: Vec<&'a IndexItem>,
|
||||
paths: Vec<(ItemType, Vec<Symbol>, Option<Vec<Symbol>>, bool)>,
|
||||
// The String is alias name and the vec is the list of the elements with this alias.
|
||||
//
|
||||
// To be noted: the `usize` elements are indexes to `items`.
|
||||
aliases: &'a AliasMap,
|
||||
// Used when a type has more than one impl with an associated item with the same name.
|
||||
associated_item_disambiguators: &'a Vec<(usize, String)>,
|
||||
// A list of shard lengths encoded as vlqhex. See the comment in write_vlqhex_to_string
|
||||
// for information on the format.
|
||||
desc_index: String,
|
||||
// A list of items with no description. This is eventually turned into a bitmap.
|
||||
empty_desc: Vec<u32>,
|
||||
}
|
||||
|
||||
struct Paths {
|
||||
ty: ItemType,
|
||||
name: Symbol,
|
||||
path: Option<usize>,
|
||||
exact_path: Option<usize>,
|
||||
search_unbox: bool,
|
||||
}
|
||||
|
||||
impl Serialize for Paths {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut seq = serializer.serialize_seq(None)?;
|
||||
seq.serialize_element(&self.ty)?;
|
||||
seq.serialize_element(self.name.as_str())?;
|
||||
if let Some(ref path) = self.path {
|
||||
seq.serialize_element(path)?;
|
||||
}
|
||||
if let Some(ref path) = self.exact_path {
|
||||
assert!(self.path.is_some());
|
||||
seq.serialize_element(path)?;
|
||||
}
|
||||
if self.search_unbox {
|
||||
if self.path.is_none() {
|
||||
seq.serialize_element(&None::<u8>)?;
|
||||
}
|
||||
if self.exact_path.is_none() {
|
||||
seq.serialize_element(&None::<u8>)?;
|
||||
}
|
||||
seq.serialize_element(&1)?;
|
||||
}
|
||||
seq.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for CrateData<'_> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut extra_paths = FxHashMap::default();
|
||||
// We need to keep the order of insertion, hence why we use an `IndexMap`. Then we will
|
||||
// insert these "extra paths" (which are paths of items from external crates) into the
|
||||
// `full_paths` list at the end.
|
||||
let mut revert_extra_paths = FxIndexMap::default();
|
||||
let mut mod_paths = FxHashMap::default();
|
||||
for (index, item) in self.items.iter().enumerate() {
|
||||
if item.path.is_empty() {
|
||||
continue;
|
||||
}
|
||||
mod_paths.insert(&item.path, index);
|
||||
}
|
||||
let mut paths = Vec::with_capacity(self.paths.len());
|
||||
for &(ty, ref path, ref exact, search_unbox) in &self.paths {
|
||||
if path.len() < 2 {
|
||||
paths.push(Paths {
|
||||
ty,
|
||||
name: path[0],
|
||||
path: None,
|
||||
exact_path: None,
|
||||
search_unbox,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
let full_path = join_path_syms(&path[..path.len() - 1]);
|
||||
let full_exact_path = exact
|
||||
.as_ref()
|
||||
.filter(|exact| exact.last() == path.last() && exact.len() >= 2)
|
||||
.map(|exact| join_path_syms(&exact[..exact.len() - 1]));
|
||||
let exact_path = extra_paths.len() + self.items.len();
|
||||
let exact_path = full_exact_path.as_ref().map(|full_exact_path| match extra_paths
|
||||
.entry(full_exact_path.clone())
|
||||
{
|
||||
Entry::Occupied(entry) => *entry.get(),
|
||||
Entry::Vacant(entry) => {
|
||||
if let Some(index) = mod_paths.get(&full_exact_path) {
|
||||
return *index;
|
||||
}
|
||||
entry.insert(exact_path);
|
||||
if !revert_extra_paths.contains_key(&exact_path) {
|
||||
revert_extra_paths.insert(exact_path, full_exact_path.clone());
|
||||
}
|
||||
exact_path
|
||||
}
|
||||
});
|
||||
if let Some(index) = mod_paths.get(&full_path) {
|
||||
paths.push(Paths {
|
||||
ty,
|
||||
name: *path.last().unwrap(),
|
||||
path: Some(*index),
|
||||
exact_path,
|
||||
search_unbox,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
// It means it comes from an external crate so the item and its path will be
|
||||
// stored into another array.
|
||||
let search_type_size = search_type.size() +
|
||||
// Artificially give struct fields a size of 8 instead of their real
|
||||
// size of 2. This is because search.js sorts them to the end, so
|
||||
// by pushing them down, we prevent them from blocking real 2-arity functions.
|
||||
//
|
||||
// `index` is put after the last `mod_paths`
|
||||
let index = extra_paths.len() + self.items.len();
|
||||
match extra_paths.entry(full_path.clone()) {
|
||||
Entry::Occupied(entry) => {
|
||||
paths.push(Paths {
|
||||
ty,
|
||||
name: *path.last().unwrap(),
|
||||
path: Some(*entry.get()),
|
||||
exact_path,
|
||||
search_unbox,
|
||||
});
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(index);
|
||||
if !revert_extra_paths.contains_key(&index) {
|
||||
revert_extra_paths.insert(index, full_path);
|
||||
}
|
||||
paths.push(Paths {
|
||||
ty,
|
||||
name: *path.last().unwrap(),
|
||||
path: Some(index),
|
||||
exact_path,
|
||||
search_unbox,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Direct exports use adjacent arrays for the current crate's items,
|
||||
// but re-exported exact paths don't.
|
||||
let mut re_exports = Vec::new();
|
||||
for (item_index, item) in self.items.iter().enumerate() {
|
||||
if let Some(exact_path) = item.exact_path.as_ref() {
|
||||
if let Some(path_index) = mod_paths.get(&exact_path) {
|
||||
re_exports.push((item_index, *path_index));
|
||||
} else {
|
||||
let path_index = extra_paths.len() + self.items.len();
|
||||
let path_index = match extra_paths.entry(exact_path.clone()) {
|
||||
Entry::Occupied(entry) => *entry.get(),
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(path_index);
|
||||
if !revert_extra_paths.contains_key(&path_index) {
|
||||
revert_extra_paths.insert(path_index, exact_path.clone());
|
||||
}
|
||||
path_index
|
||||
}
|
||||
};
|
||||
re_exports.push((item_index, path_index));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut names = Vec::with_capacity(self.items.len());
|
||||
let mut types = String::with_capacity(self.items.len());
|
||||
let mut full_paths = Vec::with_capacity(self.items.len());
|
||||
let mut parents = String::with_capacity(self.items.len());
|
||||
let mut parents_backref_queue = VecDeque::new();
|
||||
let mut functions = String::with_capacity(self.items.len());
|
||||
let mut deprecated = Vec::with_capacity(self.items.len());
|
||||
|
||||
let mut type_backref_queue = VecDeque::new();
|
||||
|
||||
let mut last_name = None;
|
||||
for (index, item) in self.items.iter().enumerate() {
|
||||
let n = item.ty as u8;
|
||||
let c = char::from(n + b'A');
|
||||
assert!(c <= 'z', "item types must fit within ASCII printables");
|
||||
types.push(c);
|
||||
|
||||
assert_eq!(
|
||||
item.parent.is_some(),
|
||||
item.parent_idx.is_some(),
|
||||
"`{}` is missing idx",
|
||||
item.name
|
||||
);
|
||||
assert!(
|
||||
parents_backref_queue.len() <= 16,
|
||||
"the string encoding only supports 16 slots of lookback"
|
||||
);
|
||||
let parent: i32 = item.parent_idx.map(|x| x + 1).unwrap_or(0).try_into().unwrap();
|
||||
if let Some(idx) = parents_backref_queue.iter().position(|p: &i32| *p == parent) {
|
||||
parents.push(
|
||||
char::try_from('0' as u32 + u32::try_from(idx).unwrap())
|
||||
.expect("last possible value is '?'"),
|
||||
);
|
||||
} else if parent == 0 {
|
||||
write_vlqhex_to_string(parent, &mut parents);
|
||||
// The number 8 is arbitrary. We want it big, but not enormous,
|
||||
// because the postings list has to fill in an empty array for each
|
||||
// unoccupied size.
|
||||
if item.ty.is_fn_like() { 0 } else { 16 };
|
||||
serialized_index.function_data[new_entry_id] = Some(FunctionData {
|
||||
function_signature: {
|
||||
let mut function_signature = String::new();
|
||||
search_type.write_to_string_without_param_names(&mut function_signature);
|
||||
function_signature
|
||||
},
|
||||
param_names: search_type
|
||||
.param_names
|
||||
.iter()
|
||||
.map(|sym| sym.map(|sym| sym.to_string()).unwrap_or(String::new()))
|
||||
.collect::<Vec<String>>(),
|
||||
});
|
||||
for index in used_in_function_signature {
|
||||
let postings = if index >= 0 {
|
||||
assert!(serialized_index.path_data[index as usize].is_some());
|
||||
&mut serialized_index.type_data[index as usize]
|
||||
.as_mut()
|
||||
.unwrap()
|
||||
.inverted_function_signature_index
|
||||
} else {
|
||||
parents_backref_queue.push_front(parent);
|
||||
write_vlqhex_to_string(parent, &mut parents);
|
||||
if parents_backref_queue.len() > 16 {
|
||||
parents_backref_queue.pop_back();
|
||||
let generic_id = usize::try_from(-index).unwrap() - 1;
|
||||
for _ in serialized_index.generic_inverted_index.len()..=generic_id {
|
||||
serialized_index.generic_inverted_index.push(Vec::new());
|
||||
}
|
||||
&mut serialized_index.generic_inverted_index[generic_id]
|
||||
};
|
||||
while postings.len() <= search_type_size {
|
||||
postings.push(Vec::new());
|
||||
}
|
||||
|
||||
if Some(item.name.as_str()) == last_name {
|
||||
names.push("");
|
||||
} else {
|
||||
names.push(item.name.as_str());
|
||||
last_name = Some(item.name.as_str());
|
||||
}
|
||||
|
||||
if !item.path.is_empty() {
|
||||
full_paths.push((index, &item.path));
|
||||
}
|
||||
|
||||
match &item.search_type {
|
||||
Some(ty) => ty.write_to_string(&mut functions, &mut type_backref_queue),
|
||||
None => functions.push('`'),
|
||||
}
|
||||
|
||||
if item.deprecation.is_some() {
|
||||
// bitmasks always use 1-indexing for items, with 0 as the crate itself
|
||||
deprecated.push(u32::try_from(index + 1).unwrap());
|
||||
}
|
||||
postings[search_type_size].push(new_entry_id as u32);
|
||||
}
|
||||
|
||||
for (index, path) in &revert_extra_paths {
|
||||
full_paths.push((*index, path));
|
||||
}
|
||||
|
||||
let param_names: Vec<(usize, String)> = {
|
||||
let mut prev = Vec::new();
|
||||
let mut result = Vec::new();
|
||||
for (index, item) in self.items.iter().enumerate() {
|
||||
if let Some(ty) = &item.search_type
|
||||
&& let my = ty
|
||||
.param_names
|
||||
.iter()
|
||||
.filter_map(|sym| sym.map(|sym| sym.to_string()))
|
||||
.collect::<Vec<_>>()
|
||||
&& my != prev
|
||||
{
|
||||
result.push((index, my.join(",")));
|
||||
prev = my;
|
||||
}
|
||||
}
|
||||
result
|
||||
};
|
||||
|
||||
let has_aliases = !self.aliases.is_empty();
|
||||
let mut crate_data =
|
||||
serializer.serialize_struct("CrateData", if has_aliases { 13 } else { 12 })?;
|
||||
crate_data.serialize_field("t", &types)?;
|
||||
crate_data.serialize_field("n", &names)?;
|
||||
crate_data.serialize_field("q", &full_paths)?;
|
||||
crate_data.serialize_field("i", &parents)?;
|
||||
crate_data.serialize_field("f", &functions)?;
|
||||
crate_data.serialize_field("D", &self.desc_index)?;
|
||||
crate_data.serialize_field("p", &paths)?;
|
||||
crate_data.serialize_field("r", &re_exports)?;
|
||||
crate_data.serialize_field("b", &self.associated_item_disambiguators)?;
|
||||
crate_data.serialize_field("c", &bitmap_to_string(&deprecated))?;
|
||||
crate_data.serialize_field("e", &bitmap_to_string(&self.empty_desc))?;
|
||||
crate_data.serialize_field("P", ¶m_names)?;
|
||||
if has_aliases {
|
||||
crate_data.serialize_field("a", &self.aliases)?;
|
||||
}
|
||||
crate_data.end()
|
||||
}
|
||||
}
|
||||
|
||||
let (empty_desc, desc) = {
|
||||
let mut empty_desc = Vec::new();
|
||||
let mut result = Vec::new();
|
||||
let mut set = String::new();
|
||||
let mut len: usize = 0;
|
||||
let mut item_index: u32 = 0;
|
||||
for desc in std::iter::once(&crate_doc).chain(crate_items.iter().map(|item| &item.desc)) {
|
||||
if desc.is_empty() {
|
||||
empty_desc.push(item_index);
|
||||
item_index += 1;
|
||||
continue;
|
||||
}
|
||||
if set.len() >= DESC_INDEX_SHARD_LEN {
|
||||
result.push((len, std::mem::take(&mut set)));
|
||||
len = 0;
|
||||
} else if len != 0 {
|
||||
set.push('\n');
|
||||
}
|
||||
set.push_str(desc);
|
||||
len += 1;
|
||||
item_index += 1;
|
||||
}
|
||||
result.push((len, std::mem::take(&mut set)));
|
||||
(empty_desc, result)
|
||||
};
|
||||
|
||||
let desc_index = {
|
||||
let mut desc_index = String::with_capacity(desc.len() * 4);
|
||||
for &(len, _) in desc.iter() {
|
||||
write_vlqhex_to_string(len.try_into().unwrap(), &mut desc_index);
|
||||
}
|
||||
desc_index
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
crate_items.len() + 1,
|
||||
desc.iter().map(|(len, _)| *len).sum::<usize>() + empty_desc.len()
|
||||
);
|
||||
|
||||
// The index, which is actually used to search, is JSON
|
||||
// It uses `JSON.parse(..)` to actually load, since JSON
|
||||
// parses faster than the full JavaScript syntax.
|
||||
let crate_name = krate.name(tcx);
|
||||
let data = CrateData {
|
||||
items: crate_items,
|
||||
paths: crate_paths,
|
||||
aliases: &aliases,
|
||||
associated_item_disambiguators: &associated_item_disambiguators,
|
||||
desc_index,
|
||||
empty_desc,
|
||||
};
|
||||
let index = OrderedJson::array_unsorted([
|
||||
OrderedJson::serialize(crate_name.as_str()).unwrap(),
|
||||
OrderedJson::serialize(data).unwrap(),
|
||||
]);
|
||||
SerializedSearchIndex { index, desc }
|
||||
Ok(serialized_index.sort())
|
||||
}
|
||||
|
||||
pub(crate) fn get_function_type_for_search(
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
use base64::prelude::*;
|
||||
|
||||
pub(crate) fn write_vlqhex_to_string(n: i32, string: &mut String) {
|
||||
pub(crate) fn write_signed_vlqhex_to_string(n: i32, string: &mut String) {
|
||||
let (sign, magnitude): (bool, u32) =
|
||||
if n >= 0 { (false, n.try_into().unwrap()) } else { (true, (-n).try_into().unwrap()) };
|
||||
// zig-zag encoding
|
||||
@@ -37,206 +35,66 @@ pub(crate) fn write_vlqhex_to_string(n: i32, string: &mut String) {
|
||||
}
|
||||
}
|
||||
|
||||
// Used during bitmap encoding
|
||||
enum Container {
|
||||
/// number of ones, bits
|
||||
Bits(Box<[u64; 1024]>),
|
||||
/// list of entries
|
||||
Array(Vec<u16>),
|
||||
/// list of (start, len-1)
|
||||
Run(Vec<(u16, u16)>),
|
||||
pub fn read_signed_vlqhex_from_string(string: &[u8]) -> Option<(i32, usize)> {
|
||||
let mut n = 0i32;
|
||||
let mut i = 0;
|
||||
while let Some(&c) = string.get(i) {
|
||||
i += 1;
|
||||
n = (n << 4) | i32::from(c & 0xF);
|
||||
if c >= 96 {
|
||||
// zig-zag encoding
|
||||
let (sign, magnitude) = (n & 1, n >> 1);
|
||||
let value = if sign == 0 { 1 } else { -1 } * magnitude;
|
||||
return Some((value, i));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
impl Container {
|
||||
fn popcount(&self) -> u32 {
|
||||
match self {
|
||||
Container::Bits(bits) => bits.iter().copied().map(|x| x.count_ones()).sum(),
|
||||
Container::Array(array) => {
|
||||
array.len().try_into().expect("array can't be bigger than 2**32")
|
||||
}
|
||||
Container::Run(runs) => {
|
||||
runs.iter().copied().map(|(_, lenm1)| u32::from(lenm1) + 1).sum()
|
||||
}
|
||||
|
||||
pub fn write_postings_to_string(postings: &[Vec<u32>], buf: &mut Vec<u8>) {
|
||||
for list in postings {
|
||||
if list.is_empty() {
|
||||
buf.push(0);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
fn push(&mut self, value: u16) {
|
||||
match self {
|
||||
Container::Bits(bits) => bits[value as usize >> 6] |= 1 << (value & 0x3F),
|
||||
Container::Array(array) => {
|
||||
array.push(value);
|
||||
if array.len() >= 4096 {
|
||||
let array = std::mem::take(array);
|
||||
*self = Container::Bits(Box::new([0; 1024]));
|
||||
for value in array {
|
||||
self.push(value);
|
||||
}
|
||||
}
|
||||
let len_before = buf.len();
|
||||
stringdex::internals::encode::write_bitmap_to_bytes(&list, &mut *buf).unwrap();
|
||||
let len_after = buf.len();
|
||||
if len_after - len_before > 1 + (4 * list.len()) && list.len() < 0x3a {
|
||||
buf.truncate(len_before);
|
||||
buf.push(list.len() as u8);
|
||||
for &item in list {
|
||||
buf.push(item as u8);
|
||||
buf.push((item >> 8) as u8);
|
||||
buf.push((item >> 16) as u8);
|
||||
buf.push((item >> 24) as u8);
|
||||
}
|
||||
Container::Run(runs) => {
|
||||
if let Some(r) = runs.last_mut()
|
||||
&& r.0 + r.1 + 1 == value
|
||||
{
|
||||
r.1 += 1;
|
||||
} else {
|
||||
runs.push((value, 0));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
fn try_make_run(&mut self) -> bool {
|
||||
match self {
|
||||
Container::Bits(bits) => {
|
||||
let mut r: u64 = 0;
|
||||
for (i, chunk) in bits.iter().copied().enumerate() {
|
||||
let next_chunk =
|
||||
i.checked_add(1).and_then(|i| bits.get(i)).copied().unwrap_or(0);
|
||||
r += !chunk & u64::from((chunk << 1).count_ones());
|
||||
r += !next_chunk & u64::from((chunk >> 63).count_ones());
|
||||
}
|
||||
if (2 + 4 * r) >= 8192 {
|
||||
return false;
|
||||
}
|
||||
let bits = std::mem::replace(bits, Box::new([0; 1024]));
|
||||
*self = Container::Run(Vec::new());
|
||||
for (i, bits) in bits.iter().copied().enumerate() {
|
||||
if bits == 0 {
|
||||
continue;
|
||||
}
|
||||
for j in 0..64 {
|
||||
let value = (u16::try_from(i).unwrap() << 6) | j;
|
||||
if bits & (1 << j) != 0 {
|
||||
self.push(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
Container::Array(array) if array.len() <= 5 => false,
|
||||
Container::Array(array) => {
|
||||
let mut r = 0;
|
||||
let mut prev = None;
|
||||
for value in array.iter().copied() {
|
||||
if value.checked_sub(1) != prev {
|
||||
r += 1;
|
||||
}
|
||||
prev = Some(value);
|
||||
}
|
||||
if 2 + 4 * r >= 2 * array.len() + 2 {
|
||||
return false;
|
||||
}
|
||||
let array = std::mem::take(array);
|
||||
*self = Container::Run(Vec::new());
|
||||
for value in array {
|
||||
self.push(value);
|
||||
}
|
||||
true
|
||||
}
|
||||
Container::Run(_) => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checked against roaring-rs in
|
||||
// https://gitlab.com/notriddle/roaring-test
|
||||
pub(crate) fn write_bitmap_to_bytes(
|
||||
domain: &[u32],
|
||||
mut out: impl std::io::Write,
|
||||
) -> std::io::Result<()> {
|
||||
// https://arxiv.org/pdf/1603.06549.pdf
|
||||
let mut keys = Vec::<u16>::new();
|
||||
let mut containers = Vec::<Container>::new();
|
||||
let mut key: u16;
|
||||
let mut domain_iter = domain.iter().copied().peekable();
|
||||
let mut has_run = false;
|
||||
while let Some(entry) = domain_iter.next() {
|
||||
key = (entry >> 16).try_into().expect("shifted off the top 16 bits, so it should fit");
|
||||
let value: u16 = (entry & 0x00_00_FF_FF).try_into().expect("AND 16 bits, so it should fit");
|
||||
let mut container = Container::Array(vec![value]);
|
||||
while let Some(entry) = domain_iter.peek().copied() {
|
||||
let entry_key: u16 =
|
||||
(entry >> 16).try_into().expect("shifted off the top 16 bits, so it should fit");
|
||||
if entry_key != key {
|
||||
break;
|
||||
pub fn read_postings_from_string(postings: &mut Vec<Vec<u32>>, mut buf: &[u8]) {
|
||||
use stringdex::internals::decode::RoaringBitmap;
|
||||
while let Some(&c) = buf.get(0) {
|
||||
if c < 0x3a {
|
||||
buf = &buf[1..];
|
||||
let mut slot = Vec::new();
|
||||
for _ in 0..c {
|
||||
slot.push(
|
||||
(buf[0] as u32)
|
||||
| ((buf[1] as u32) << 8)
|
||||
| ((buf[2] as u32) << 16)
|
||||
| ((buf[3] as u32) << 24),
|
||||
);
|
||||
buf = &buf[4..];
|
||||
}
|
||||
domain_iter.next().expect("peeking just succeeded");
|
||||
container
|
||||
.push((entry & 0x00_00_FF_FF).try_into().expect("AND 16 bits, so it should fit"));
|
||||
}
|
||||
keys.push(key);
|
||||
has_run = container.try_make_run() || has_run;
|
||||
containers.push(container);
|
||||
}
|
||||
// https://github.com/RoaringBitmap/RoaringFormatSpec
|
||||
const SERIAL_COOKIE_NO_RUNCONTAINER: u32 = 12346;
|
||||
const SERIAL_COOKIE: u32 = 12347;
|
||||
const NO_OFFSET_THRESHOLD: u32 = 4;
|
||||
let size: u32 = containers.len().try_into().unwrap();
|
||||
let start_offset = if has_run {
|
||||
out.write_all(&u32::to_le_bytes(SERIAL_COOKIE | ((size - 1) << 16)))?;
|
||||
for set in containers.chunks(8) {
|
||||
let mut b = 0;
|
||||
for (i, container) in set.iter().enumerate() {
|
||||
if matches!(container, &Container::Run(..)) {
|
||||
b |= 1 << i;
|
||||
}
|
||||
}
|
||||
out.write_all(&[b])?;
|
||||
}
|
||||
if size < NO_OFFSET_THRESHOLD {
|
||||
4 + 4 * size + size.div_ceil(8)
|
||||
postings.push(slot);
|
||||
} else {
|
||||
4 + 8 * size + size.div_ceil(8)
|
||||
}
|
||||
} else {
|
||||
out.write_all(&u32::to_le_bytes(SERIAL_COOKIE_NO_RUNCONTAINER))?;
|
||||
out.write_all(&u32::to_le_bytes(containers.len().try_into().unwrap()))?;
|
||||
4 + 4 + 4 * size + 4 * size
|
||||
};
|
||||
for (&key, container) in keys.iter().zip(&containers) {
|
||||
// descriptive header
|
||||
let key: u32 = key.into();
|
||||
let count: u32 = container.popcount() - 1;
|
||||
out.write_all(&u32::to_le_bytes((count << 16) | key))?;
|
||||
}
|
||||
if !has_run || size >= NO_OFFSET_THRESHOLD {
|
||||
// offset header
|
||||
let mut starting_offset = start_offset;
|
||||
for container in &containers {
|
||||
out.write_all(&u32::to_le_bytes(starting_offset))?;
|
||||
starting_offset += match container {
|
||||
Container::Bits(_) => 8192u32,
|
||||
Container::Array(array) => u32::try_from(array.len()).unwrap() * 2,
|
||||
Container::Run(runs) => 2 + u32::try_from(runs.len()).unwrap() * 4,
|
||||
};
|
||||
let (bitmap, consumed_bytes_len) =
|
||||
RoaringBitmap::from_bytes(buf).unwrap_or_else(|| (RoaringBitmap::default(), 0));
|
||||
assert_ne!(consumed_bytes_len, 0);
|
||||
postings.push(bitmap.to_vec());
|
||||
buf = &buf[consumed_bytes_len..];
|
||||
}
|
||||
}
|
||||
for container in &containers {
|
||||
match container {
|
||||
Container::Bits(bits) => {
|
||||
for chunk in bits.iter() {
|
||||
out.write_all(&u64::to_le_bytes(*chunk))?;
|
||||
}
|
||||
}
|
||||
Container::Array(array) => {
|
||||
for value in array.iter() {
|
||||
out.write_all(&u16::to_le_bytes(*value))?;
|
||||
}
|
||||
}
|
||||
Container::Run(runs) => {
|
||||
out.write_all(&u16::to_le_bytes(runs.len().try_into().unwrap()))?;
|
||||
for (start, lenm1) in runs.iter().copied() {
|
||||
out.write_all(&u16::to_le_bytes(start))?;
|
||||
out.write_all(&u16::to_le_bytes(lenm1))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn bitmap_to_string(domain: &[u32]) -> String {
|
||||
let mut buf = Vec::new();
|
||||
let mut strbuf = String::new();
|
||||
write_bitmap_to_bytes(domain, &mut buf).unwrap();
|
||||
BASE64_STANDARD.encode_string(&buf, &mut strbuf);
|
||||
strbuf
|
||||
}
|
||||
|
||||
@@ -65,17 +65,17 @@ pub(crate) fn write_shared(
|
||||
// Write shared runs within a flock; disable thread dispatching of IO temporarily.
|
||||
let _lock = try_err!(flock::Lock::new(&lock_file, true, true, true), &lock_file);
|
||||
|
||||
let SerializedSearchIndex { index, desc } = build_index(krate, &mut cx.shared.cache, tcx);
|
||||
write_search_desc(cx, krate, &desc)?; // does not need to be merged
|
||||
let search_index =
|
||||
build_index(krate, &mut cx.shared.cache, tcx, &cx.dst, &cx.shared.resource_suffix)?;
|
||||
|
||||
let crate_name = krate.name(cx.tcx());
|
||||
let crate_name = crate_name.as_str(); // rand
|
||||
let crate_name_json = OrderedJson::serialize(crate_name).unwrap(); // "rand"
|
||||
let external_crates = hack_get_external_crate_names(&cx.dst, &cx.shared.resource_suffix)?;
|
||||
let info = CrateInfo {
|
||||
version: CrateInfoVersion::V1,
|
||||
version: CrateInfoVersion::V2,
|
||||
src_files_js: SourcesPart::get(cx, &crate_name_json)?,
|
||||
search_index_js: SearchIndexPart::get(index, &cx.shared.resource_suffix)?,
|
||||
search_index,
|
||||
all_crates: AllCratesPart::get(crate_name_json.clone(), &cx.shared.resource_suffix)?,
|
||||
crates_index: CratesIndexPart::get(crate_name, &external_crates)?,
|
||||
trait_impl: TraitAliasPart::get(cx, &crate_name_json)?,
|
||||
@@ -141,7 +141,7 @@ pub(crate) fn write_not_crate_specific(
|
||||
resource_suffix: &str,
|
||||
include_sources: bool,
|
||||
) -> Result<(), Error> {
|
||||
write_rendered_cross_crate_info(crates, dst, opt, include_sources)?;
|
||||
write_rendered_cross_crate_info(crates, dst, opt, include_sources, resource_suffix)?;
|
||||
write_static_files(dst, opt, style_files, css_file_extension, resource_suffix)?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -151,13 +151,18 @@ fn write_rendered_cross_crate_info(
|
||||
dst: &Path,
|
||||
opt: &RenderOptions,
|
||||
include_sources: bool,
|
||||
resource_suffix: &str,
|
||||
) -> Result<(), Error> {
|
||||
let m = &opt.should_merge;
|
||||
if opt.should_emit_crate() {
|
||||
if include_sources {
|
||||
write_rendered_cci::<SourcesPart, _>(SourcesPart::blank, dst, crates, m)?;
|
||||
}
|
||||
write_rendered_cci::<SearchIndexPart, _>(SearchIndexPart::blank, dst, crates, m)?;
|
||||
crates
|
||||
.iter()
|
||||
.fold(SerializedSearchIndex::default(), |a, b| a.union(&b.search_index))
|
||||
.sort()
|
||||
.write_to(dst, resource_suffix)?;
|
||||
write_rendered_cci::<AllCratesPart, _>(AllCratesPart::blank, dst, crates, m)?;
|
||||
}
|
||||
write_rendered_cci::<TraitAliasPart, _>(TraitAliasPart::blank, dst, crates, m)?;
|
||||
@@ -215,38 +220,12 @@ fn write_static_files(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write the search description shards to disk
|
||||
fn write_search_desc(
|
||||
cx: &mut Context<'_>,
|
||||
krate: &Crate,
|
||||
search_desc: &[(usize, String)],
|
||||
) -> Result<(), Error> {
|
||||
let crate_name = krate.name(cx.tcx()).to_string();
|
||||
let encoded_crate_name = OrderedJson::serialize(&crate_name).unwrap();
|
||||
let path = PathBuf::from_iter([&cx.dst, Path::new("search.desc"), Path::new(&crate_name)]);
|
||||
if path.exists() {
|
||||
try_err!(fs::remove_dir_all(&path), &path);
|
||||
}
|
||||
for (i, (_, part)) in search_desc.iter().enumerate() {
|
||||
let filename = static_files::suffix_path(
|
||||
&format!("{crate_name}-desc-{i}-.js"),
|
||||
&cx.shared.resource_suffix,
|
||||
);
|
||||
let path = path.join(filename);
|
||||
let part = OrderedJson::serialize(part).unwrap();
|
||||
let part = format!("searchState.loadedDescShard({encoded_crate_name}, {i}, {part})");
|
||||
create_parents(&path)?;
|
||||
try_err!(fs::write(&path, part), &path);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Contains pre-rendered contents to insert into the CCI template
|
||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||
pub(crate) struct CrateInfo {
|
||||
version: CrateInfoVersion,
|
||||
src_files_js: PartsAndLocations<SourcesPart>,
|
||||
search_index_js: PartsAndLocations<SearchIndexPart>,
|
||||
search_index: SerializedSearchIndex,
|
||||
all_crates: PartsAndLocations<AllCratesPart>,
|
||||
crates_index: PartsAndLocations<CratesIndexPart>,
|
||||
trait_impl: PartsAndLocations<TraitAliasPart>,
|
||||
@@ -277,7 +256,7 @@ pub(crate) fn read_many(parts_paths: &[PathToParts]) -> Result<Vec<Self>, Error>
|
||||
/// to provide better diagnostics about including an invalid file.
|
||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||
enum CrateInfoVersion {
|
||||
V1,
|
||||
V2,
|
||||
}
|
||||
|
||||
/// Paths (relative to the doc root) and their pre-merge contents
|
||||
@@ -331,36 +310,6 @@ trait CciPart: Sized + fmt::Display + DeserializeOwned + 'static {
|
||||
fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self>;
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Default, Debug)]
|
||||
struct SearchIndex;
|
||||
type SearchIndexPart = Part<SearchIndex, EscapedJson>;
|
||||
impl CciPart for SearchIndexPart {
|
||||
type FileFormat = sorted_template::Js;
|
||||
fn from_crate_info(crate_info: &CrateInfo) -> &PartsAndLocations<Self> {
|
||||
&crate_info.search_index_js
|
||||
}
|
||||
}
|
||||
|
||||
impl SearchIndexPart {
|
||||
fn blank() -> SortedTemplate<<Self as CciPart>::FileFormat> {
|
||||
SortedTemplate::from_before_after(
|
||||
r"var searchIndex = new Map(JSON.parse('[",
|
||||
r"]'));
|
||||
if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
|
||||
else if (window.initSearch) window.initSearch(searchIndex);",
|
||||
)
|
||||
}
|
||||
|
||||
fn get(
|
||||
search_index: OrderedJson,
|
||||
resource_suffix: &str,
|
||||
) -> Result<PartsAndLocations<Self>, Error> {
|
||||
let path = suffix_path("search-index.js", resource_suffix);
|
||||
let search_index = EscapedJson::from(search_index);
|
||||
Ok(PartsAndLocations::with(path, search_index))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Default, Debug)]
|
||||
struct AllCrates;
|
||||
type AllCratesPart = Part<AllCrates, OrderedJson>;
|
||||
@@ -426,6 +375,7 @@ impl CratesIndexPart {
|
||||
fn blank(cx: &Context<'_>) -> SortedTemplate<<Self as CciPart>::FileFormat> {
|
||||
let page = layout::Page {
|
||||
title: "Index of crates",
|
||||
short_title: "Crates",
|
||||
css_class: "mod sys",
|
||||
root_path: "./",
|
||||
static_root_path: cx.shared.static_root_path.as_deref(),
|
||||
|
||||
@@ -29,14 +29,6 @@ fn sources_template() {
|
||||
assert_eq!(but_last_line(&template.to_string()), r#"createSrcSidebar('["u","v"]');"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sources_parts() {
|
||||
let parts =
|
||||
SearchIndexPart::get(OrderedJson::serialize(["foo", "bar"]).unwrap(), "suffix").unwrap();
|
||||
assert_eq!(&parts.parts[0].0, Path::new("search-indexsuffix.js"));
|
||||
assert_eq!(&parts.parts[0].1.to_string(), r#"["foo","bar"]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_crates_template() {
|
||||
let mut template = AllCratesPart::blank();
|
||||
@@ -54,31 +46,6 @@ fn all_crates_parts() {
|
||||
assert_eq!(&parts.parts[0].1.to_string(), r#""crate""#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn search_index_template() {
|
||||
let mut template = SearchIndexPart::blank();
|
||||
assert_eq!(
|
||||
but_last_line(&template.to_string()),
|
||||
r"var searchIndex = new Map(JSON.parse('[]'));
|
||||
if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
|
||||
else if (window.initSearch) window.initSearch(searchIndex);"
|
||||
);
|
||||
template.append(EscapedJson::from(OrderedJson::serialize([1, 2]).unwrap()).to_string());
|
||||
assert_eq!(
|
||||
but_last_line(&template.to_string()),
|
||||
r"var searchIndex = new Map(JSON.parse('[[1,2]]'));
|
||||
if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
|
||||
else if (window.initSearch) window.initSearch(searchIndex);"
|
||||
);
|
||||
template.append(EscapedJson::from(OrderedJson::serialize([4, 3]).unwrap()).to_string());
|
||||
assert_eq!(
|
||||
but_last_line(&template.to_string()),
|
||||
r"var searchIndex = new Map(JSON.parse('[[1,2],[4,3]]'));
|
||||
if (typeof exports !== 'undefined') exports.searchIndex = searchIndex;
|
||||
else if (window.initSearch) window.initSearch(searchIndex);"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn crates_index_part() {
|
||||
let external_crates = ["bar".to_string(), "baz".to_string()];
|
||||
|
||||
@@ -230,6 +230,7 @@ fn emit_source(
|
||||
);
|
||||
let page = layout::Page {
|
||||
title: &title,
|
||||
short_title: &src_fname.to_string_lossy(),
|
||||
css_class: "src",
|
||||
root_path: &root_path,
|
||||
static_root_path: shared.static_root_path.as_deref(),
|
||||
|
||||
@@ -258,6 +258,17 @@ h1, h2, h3, h4 {
|
||||
padding-bottom: 6px;
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
.search-results-main-heading {
|
||||
grid-template-areas:
|
||||
"main-heading-breadcrumbs main-heading-placeholder"
|
||||
"main-heading-breadcrumbs main-heading-toolbar "
|
||||
"main-heading-h1 main-heading-toolbar ";
|
||||
}
|
||||
.search-results-main-heading nav.sub {
|
||||
grid-area: main-heading-h1;
|
||||
align-items: end;
|
||||
margin: 4px 0 8px 0;
|
||||
}
|
||||
.rustdoc-breadcrumbs {
|
||||
grid-area: main-heading-breadcrumbs;
|
||||
line-height: 1.25;
|
||||
@@ -265,6 +276,16 @@ h1, h2, h3, h4 {
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
}
|
||||
.search-switcher {
|
||||
grid-area: main-heading-breadcrumbs;
|
||||
line-height: 1.5;
|
||||
display: flex;
|
||||
color: var(--main-color);
|
||||
align-items: baseline;
|
||||
white-space: nowrap;
|
||||
padding-top: 8px;
|
||||
min-height: 34px;
|
||||
}
|
||||
.rustdoc-breadcrumbs a {
|
||||
padding: 5px 0 7px;
|
||||
}
|
||||
@@ -305,7 +326,7 @@ h4.code-header {
|
||||
#crate-search,
|
||||
h1, h2, h3, h4, h5, h6,
|
||||
.sidebar,
|
||||
.mobile-topbar,
|
||||
rustdoc-topbar,
|
||||
.search-input,
|
||||
.search-results .result-name,
|
||||
.item-table dt > a,
|
||||
@@ -317,6 +338,7 @@ rustdoc-toolbar,
|
||||
summary.hideme,
|
||||
.scraped-example-list,
|
||||
.rustdoc-breadcrumbs,
|
||||
.search-switcher,
|
||||
/* This selector is for the items listed in the "all items" page. */
|
||||
ul.all-items {
|
||||
font-family: "Fira Sans", Arial, NanumBarunGothic, sans-serif;
|
||||
@@ -329,7 +351,7 @@ a.anchor,
|
||||
.rust a,
|
||||
.sidebar h2 a,
|
||||
.sidebar h3 a,
|
||||
.mobile-topbar h2 a,
|
||||
rustdoc-topbar h2 a,
|
||||
h1 a,
|
||||
.search-results a,
|
||||
.search-results li,
|
||||
@@ -616,7 +638,7 @@ img {
|
||||
color: var(--sidebar-resizer-active);
|
||||
}
|
||||
|
||||
.sidebar, .mobile-topbar, .sidebar-menu-toggle,
|
||||
.sidebar, rustdoc-topbar, .sidebar-menu-toggle,
|
||||
#src-sidebar {
|
||||
background-color: var(--sidebar-background-color);
|
||||
}
|
||||
@@ -857,7 +879,7 @@ ul.block, .block li, .block ul {
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.mobile-topbar {
|
||||
rustdoc-topbar {
|
||||
display: none;
|
||||
}
|
||||
|
||||
@@ -1098,16 +1120,15 @@ div.where {
|
||||
nav.sub {
|
||||
flex-grow: 1;
|
||||
flex-flow: row nowrap;
|
||||
margin: 4px 0 0 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
align-items: start;
|
||||
margin-top: 4px;
|
||||
}
|
||||
.search-form {
|
||||
position: relative;
|
||||
display: flex;
|
||||
height: 34px;
|
||||
flex-grow: 1;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
.src nav.sub {
|
||||
margin: 0 0 -10px 0;
|
||||
@@ -1208,27 +1229,14 @@ table,
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
.search-results-title {
|
||||
margin-top: 0;
|
||||
white-space: nowrap;
|
||||
/* flex layout allows shrinking the <select> appropriately if it becomes too large */
|
||||
display: flex;
|
||||
/* make things look like in a line, despite the fact that we're using a layout
|
||||
with boxes (i.e. from the flex layout) */
|
||||
align-items: baseline;
|
||||
}
|
||||
.search-results-title + .sub-heading {
|
||||
color: var(--main-color);
|
||||
display: flex;
|
||||
align-items: baseline;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#crate-search-div {
|
||||
/* ensures that 100% in properties of #crate-search-div:after
|
||||
are relative to the size of this div */
|
||||
position: relative;
|
||||
/* allows this div (and with it the <select>-element "#crate-search") to be shrunk */
|
||||
min-width: 0;
|
||||
/* keep label text for switcher from moving down when this appears */
|
||||
margin-top: -1px;
|
||||
}
|
||||
#crate-search {
|
||||
padding: 0 23px 0 4px;
|
||||
@@ -1294,6 +1302,7 @@ so that we can apply CSS-filters to change the arrow color in themes */
|
||||
flex-grow: 1;
|
||||
background-color: var(--button-background-color);
|
||||
color: var(--search-color);
|
||||
max-width: 100%;
|
||||
}
|
||||
.search-input:focus {
|
||||
border-color: var(--search-input-focused-border-color);
|
||||
@@ -1459,14 +1468,14 @@ so that we can apply CSS-filters to change the arrow color in themes */
|
||||
}
|
||||
|
||||
#settings.popover {
|
||||
--popover-arrow-offset: 202px;
|
||||
--popover-arrow-offset: 196px;
|
||||
top: calc(100% - 16px);
|
||||
}
|
||||
|
||||
/* use larger max-width for help popover, but not for help.html */
|
||||
#help.popover {
|
||||
max-width: 600px;
|
||||
--popover-arrow-offset: 118px;
|
||||
--popover-arrow-offset: 115px;
|
||||
top: calc(100% - 16px);
|
||||
}
|
||||
|
||||
@@ -1929,10 +1938,12 @@ a.tooltip:hover::after {
|
||||
color: inherit;
|
||||
}
|
||||
#search-tabs button:not(.selected) {
|
||||
--search-tab-button-background: var(--search-tab-button-not-selected-background);
|
||||
background-color: var(--search-tab-button-not-selected-background);
|
||||
border-top-color: var(--search-tab-button-not-selected-border-top-color);
|
||||
}
|
||||
#search-tabs button:hover, #search-tabs button.selected {
|
||||
--search-tab-button-background: var(--search-tab-button-selected-background);
|
||||
background-color: var(--search-tab-button-selected-background);
|
||||
border-top-color: var(--search-tab-button-selected-border-top-color);
|
||||
}
|
||||
@@ -1941,6 +1952,73 @@ a.tooltip:hover::after {
|
||||
font-size: 1rem;
|
||||
font-variant-numeric: tabular-nums;
|
||||
color: var(--search-tab-title-count-color);
|
||||
position: relative;
|
||||
}
|
||||
|
||||
#search-tabs .count.loading {
|
||||
color: transparent;
|
||||
}
|
||||
|
||||
.search-form.loading {
|
||||
--search-tab-button-background: var(--button-background-color);
|
||||
}
|
||||
|
||||
#search-tabs .count.loading::before,
|
||||
.search-form.loading::before
|
||||
{
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
border-radius: 16px;
|
||||
background: radial-gradient(
|
||||
var(--search-tab-button-background) 0 50%,
|
||||
transparent 50% 100%
|
||||
), conic-gradient(
|
||||
var(--code-highlight-kw-color) 0deg 30deg,
|
||||
var(--code-highlight-prelude-color) 30deg 60deg,
|
||||
var(--code-highlight-number-color) 90deg 120deg,
|
||||
var(--code-highlight-lifetime-color ) 120deg 150deg,
|
||||
var(--code-highlight-comment-color) 150deg 180deg,
|
||||
var(--code-highlight-self-color) 180deg 210deg,
|
||||
var(--code-highlight-attribute-color) 210deg 240deg,
|
||||
var(--code-highlight-literal-color) 210deg 240deg,
|
||||
var(--code-highlight-macro-color) 240deg 270deg,
|
||||
var(--code-highlight-question-mark-color) 270deg 300deg,
|
||||
var(--code-highlight-prelude-val-color) 300deg 330deg,
|
||||
var(--code-highlight-doc-comment-color) 330deg 360deg
|
||||
);
|
||||
content: "";
|
||||
position: absolute;
|
||||
left: 2px;
|
||||
top: 2px;
|
||||
animation: rotating 1.25s linear infinite;
|
||||
}
|
||||
#search-tabs .count.loading::after,
|
||||
.search-form.loading::after
|
||||
{
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
border-radius: 18px;
|
||||
background: conic-gradient(
|
||||
var(--search-tab-button-background) 0deg 180deg,
|
||||
transparent 270deg 360deg
|
||||
);
|
||||
content: "";
|
||||
position: absolute;
|
||||
left: 1px;
|
||||
top: 1px;
|
||||
animation: rotating 0.66s linear infinite;
|
||||
}
|
||||
|
||||
.search-form.loading::before {
|
||||
left: auto;
|
||||
right: 9px;
|
||||
top: 8px;
|
||||
}
|
||||
|
||||
.search-form.loading::after {
|
||||
left: auto;
|
||||
right: 8px;
|
||||
top: 8px;
|
||||
}
|
||||
|
||||
#search .error code {
|
||||
@@ -1974,7 +2052,7 @@ a.tooltip:hover::after {
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
#settings-menu, #help-button, button#toggle-all-docs {
|
||||
#search-button, .settings-menu, .help-menu, button#toggle-all-docs {
|
||||
margin-left: var(--button-left-margin);
|
||||
display: flex;
|
||||
line-height: 1.25;
|
||||
@@ -1989,69 +2067,100 @@ a.tooltip:hover::after {
|
||||
display: flex;
|
||||
margin-right: 4px;
|
||||
position: fixed;
|
||||
margin-top: 25px;
|
||||
left: 6px;
|
||||
height: 34px;
|
||||
width: 34px;
|
||||
z-index: calc(var(--desktop-sidebar-z-index) + 1);
|
||||
}
|
||||
.hide-sidebar #sidebar-button {
|
||||
left: 6px;
|
||||
background-color: var(--main-background-color);
|
||||
z-index: 1;
|
||||
}
|
||||
.src #sidebar-button {
|
||||
margin-top: 0;
|
||||
top: 8px;
|
||||
left: 8px;
|
||||
z-index: calc(var(--desktop-sidebar-z-index) + 1);
|
||||
border-color: var(--border-color);
|
||||
}
|
||||
.hide-sidebar .src #sidebar-button {
|
||||
position: static;
|
||||
}
|
||||
#settings-menu > a, #help-button > a, #sidebar-button > a, button#toggle-all-docs {
|
||||
#search-button > a,
|
||||
.settings-menu > a,
|
||||
.help-menu > a,
|
||||
#sidebar-button > a,
|
||||
button#toggle-all-docs {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-direction: column;
|
||||
}
|
||||
#settings-menu > a, #help-button > a, button#toggle-all-docs {
|
||||
#search-button > a,
|
||||
.settings-menu > a,
|
||||
.help-menu > a,
|
||||
button#toggle-all-docs {
|
||||
border: 1px solid transparent;
|
||||
border-radius: var(--button-border-radius);
|
||||
color: var(--main-color);
|
||||
}
|
||||
#settings-menu > a, #help-button > a, button#toggle-all-docs {
|
||||
#search-button > a, .settings-menu > a, .help-menu > a, button#toggle-all-docs {
|
||||
width: 80px;
|
||||
border-radius: var(--toolbar-button-border-radius);
|
||||
}
|
||||
#settings-menu > a, #help-button > a {
|
||||
#search-button > a, .settings-menu > a, .help-menu > a {
|
||||
min-width: 0;
|
||||
}
|
||||
#sidebar-button > a {
|
||||
background-color: var(--sidebar-background-color);
|
||||
border: solid 1px transparent;
|
||||
border-radius: var(--button-border-radius);
|
||||
background-color: var(--button-background-color);
|
||||
width: 33px;
|
||||
}
|
||||
#sidebar-button > a:hover, #sidebar-button > a:focus-visible {
|
||||
background-color: var(--main-background-color);
|
||||
.src #sidebar-button > a {
|
||||
background-color: var(--sidebar-background-color);
|
||||
border-color: var(--border-color);
|
||||
}
|
||||
|
||||
#settings-menu > a:hover, #settings-menu > a:focus-visible,
|
||||
#help-button > a:hover, #help-button > a:focus-visible,
|
||||
#search-button > a:hover, #search-button > a:focus-visible,
|
||||
.settings-menu > a:hover, .settings-menu > a:focus-visible,
|
||||
.help-menu > a:hover, #help-menu > a:focus-visible,
|
||||
#sidebar-button > a:hover, #sidebar-button > a:focus-visible,
|
||||
#copy-path:hover, #copy-path:focus-visible,
|
||||
button#toggle-all-docs:hover, button#toggle-all-docs:focus-visible {
|
||||
border-color: var(--settings-button-border-focus);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
#settings-menu > a::before {
|
||||
#search-button > a::before {
|
||||
/* Magnifying glass */
|
||||
content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" \
|
||||
width="18" height="18" viewBox="0 0 16 16">\
|
||||
<circle r="5" cy="7" cx="7" style="fill:none;stroke:black;stroke-width:3"/><path \
|
||||
d="M14.5,14.5 12,12" style="fill:none;stroke:black;stroke-width:3;stroke-linecap:round">\
|
||||
</path><desc>Search</desc>\
|
||||
</svg>');
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
filter: var(--settings-menu-filter);
|
||||
}
|
||||
|
||||
.settings-menu > a::before {
|
||||
/* Wheel <https://www.svgrepo.com/svg/384069/settings-cog-gear> */
|
||||
content: url('data:image/svg+xml,<svg width="18" height="18" viewBox="0 0 12 12" \
|
||||
enable-background="new 0 0 12 12" xmlns="http://www.w3.org/2000/svg">\
|
||||
<path d="M10.25,6c0-0.1243286-0.0261841-0.241333-0.0366211-0.362915l1.6077881-1.5545654l\
|
||||
-1.25-2.1650391 c0,0-1.2674561,0.3625488-2.1323853,0.6099854c-0.2034912-0.1431885-0.421875\
|
||||
-0.2639771-0.6494751-0.3701782L7.25,0h-2.5 c0,0-0.3214111,1.2857666-0.5393066,2.1572876\
|
||||
C3.9830933,2.2634888,3.7647095,2.3842773,3.5612183,2.5274658L1.428833,1.9174805 \
|
||||
l-1.25,2.1650391c0,0,0.9641113,0.9321899,1.6077881,1.5545654C1.7761841,5.758667,\
|
||||
1.75,5.8756714,1.75,6 s0.0261841,0.241333,0.0366211,0.362915L0.178833,7.9174805l1.25,\
|
||||
2.1650391l2.1323853-0.6099854 c0.2034912,0.1432495,0.421875,0.2639771,0.6494751,0.3701782\
|
||||
L4.75,12h2.5l0.5393066-2.1572876 c0.2276001-0.1062012,0.4459839-0.2269287,0.6494751\
|
||||
-0.3701782l2.1323853,0.6099854l1.25-2.1650391L10.2133789,6.362915 C10.2238159,6.241333,\
|
||||
10.25,6.1243286,10.25,6z M6,7.5C5.1715698,7.5,4.5,6.8284302,4.5,6S5.1715698,4.5,6,4.5S7.5\
|
||||
,5.1715698,7.5,6 S6.8284302,7.5,6,7.5z" fill="black"/></svg>');
|
||||
<path d="m4.75 0s-0.32117 1.286-0.53906 2.1576c-0.2276 0.1062-0.44625 \
|
||||
0.2266-0.64974 0.36979l-2.1328-0.60938-1.25 2.1641s0.9644 0.93231 1.6081 1.5547c-0.010437 \
|
||||
0.12158-0.036458 0.23895-0.036458 0.36328s0.026021 0.2417 0.036458 0.36328l-1.6081 \
|
||||
1.5547 1.25 2.1641 2.1328-0.60937c0.20349 0.14325 0.42214 0.26359 0.64974 0.36979l0.53906 \
|
||||
2.1576h2.5l0.53906-2.1576c0.2276-0.1062 0.44625-0.22654 0.64974-0.36979l2.1328 0.60937 \
|
||||
1.25-2.1641-1.6081-1.5547c0.010437-0.12158 0.036458-0.23895 \
|
||||
0.036458-0.36328s-0.02602-0.2417-0.03646-0.36328l1.6081-1.5547-1.25-2.1641s-1.2679 \
|
||||
0.36194-2.1328 0.60938c-0.20349-0.14319-0.42214-0.26359-0.64974-0.36979l-0.53906-2.1576\
|
||||
zm1.25 2.5495c1.9058-2.877e-4 3.4508 1.5447 3.4505 3.4505 2.877e-4 1.9058-1.5447 3.4508-3.4505 \
|
||||
3.4505-1.9058 2.877e-4 -3.4508-1.5447-3.4505-3.4505-2.877e-4 -1.9058 1.5447-3.4508 \
|
||||
3.4505-3.4505z" fill="black"/>\
|
||||
<circle cx="6" cy="6" r="1.75" fill="none" stroke="black" stroke-width="1"/></svg>');
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
filter: var(--settings-menu-filter);
|
||||
@@ -2067,36 +2176,51 @@ button#toggle-all-docs::before {
|
||||
filter: var(--settings-menu-filter);
|
||||
}
|
||||
|
||||
button#toggle-all-docs.will-expand::before {
|
||||
/* Custom arrow icon */
|
||||
content: url('data:image/svg+xml,<svg width="18" height="18" viewBox="0 0 12 12" \
|
||||
enable-background="new 0 0 12 12" xmlns="http://www.w3.org/2000/svg">\
|
||||
<path d="M2,5l4,-4l4,4M2,7l4,4l4,-4" stroke="black" fill="none" stroke-width="2px"/></svg>');
|
||||
}
|
||||
|
||||
#help-button > a::before {
|
||||
/* Question mark with circle */
|
||||
content: url('data:image/svg+xml,<svg width="18" height="18" viewBox="0 0 12 12" \
|
||||
enable-background="new 0 0 12 12" xmlns="http://www.w3.org/2000/svg" fill="none">\
|
||||
<circle r="5.25" cx="6" cy="6" stroke-width="1.25" stroke="black"/>\
|
||||
<text x="6" y="7" style="font:8px sans-serif;font-weight:1000" text-anchor="middle" \
|
||||
dominant-baseline="middle" fill="black">?</text></svg>');
|
||||
.help-menu > a::before {
|
||||
/* Question mark with "circle" */
|
||||
content: url('data:image/svg+xml,\
|
||||
<svg width="18" height="18" enable-background="new 0 0 12 12" fill="none" \
|
||||
version="1.1" viewBox="0 0 12 12" xmlns="http://www.w3.org/2000/svg"> \
|
||||
<path d="m6.007 0.6931c2.515 0 5.074 1.908 5.074 5.335 0 3.55-2.567 5.278-5.088 \
|
||||
5.278-2.477 0-5.001-1.742-5.001-5.3 0-3.38 2.527-5.314 5.014-5.314z" stroke="black" \
|
||||
stroke-width="1.5"/>\
|
||||
<path d="m5.999 7.932c0.3111 0 0.7062 0.2915 0.7062 0.7257 0 0.5458-0.3951 \
|
||||
0.8099-0.7081 0.8099-0.2973 0-0.7023-0.266-0.7023-0.7668 0-0.4695 0.3834-0.7688 \
|
||||
0.7042-0.7688z" fill="black"/>\
|
||||
<path d="m4.281 3.946c0.0312-0.03057 0.06298-0.06029 0.09528-0.08916 0.4833-0.432 1.084-0.6722 \
|
||||
1.634-0.6722 1.141 0 1.508 1.043 1.221 1.621-0.2753 0.5542-1.061 0.5065-1.273 \
|
||||
1.595-0.05728 0.2939 0.0134 0.9812 0.0134 1.205" fill="none" stroke="black" \
|
||||
stroke-width="1.25"/>\
|
||||
</svg>');
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
filter: var(--settings-menu-filter);
|
||||
}
|
||||
|
||||
/* design hack to cope with "Help" being far shorter than "Settings" etc */
|
||||
.help-menu > a {
|
||||
width: 74px;
|
||||
}
|
||||
.help-menu > a > .label {
|
||||
padding-right: 1px;
|
||||
}
|
||||
#toggle-all-docs:not(.will-expand) > .label {
|
||||
padding-left: 1px;
|
||||
}
|
||||
|
||||
#search-button > a::before,
|
||||
button#toggle-all-docs::before,
|
||||
#help-button > a::before,
|
||||
#settings-menu > a::before {
|
||||
.help-menu > a::before,
|
||||
.settings-menu > a::before {
|
||||
filter: var(--settings-menu-filter);
|
||||
margin: 8px;
|
||||
}
|
||||
|
||||
@media not (pointer: coarse) {
|
||||
#search-button > a:hover::before,
|
||||
button#toggle-all-docs:hover::before,
|
||||
#help-button > a:hover::before,
|
||||
#settings-menu > a:hover::before {
|
||||
.help-menu > a:hover::before,
|
||||
.settings-menu > a:hover::before {
|
||||
filter: var(--settings-menu-hover-filter);
|
||||
}
|
||||
}
|
||||
@@ -2122,9 +2246,9 @@ rustdoc-toolbar span.label {
|
||||
/* sidebar resizer image */
|
||||
content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 22 22" \
|
||||
fill="none" stroke="black">\
|
||||
<rect x="1" y="1" width="20" height="20" ry="1.5" stroke-width="1.5" stroke="%23777"/>\
|
||||
<circle cx="4.375" cy="4.375" r="1" stroke-width=".75"/>\
|
||||
<path d="m7.6121 3v16 M5.375 7.625h-2 m2 3h-2 m2 3h-2" stroke-width="1.25"/></svg>');
|
||||
<rect x="1" y="2" width="20" height="18" ry="1.5" stroke-width="1.5" stroke="%23777"/>\
|
||||
<circle cx="4.375" cy="5.375" r="1" stroke-width=".75"/>\
|
||||
<path d="m7.6121 4v14 M5.375 8.625h-2 m2 3h-2 m2 3h-2" stroke-width="1.25"/></svg>');
|
||||
width: 22px;
|
||||
height: 22px;
|
||||
}
|
||||
@@ -2137,7 +2261,8 @@ rustdoc-toolbar span.label {
|
||||
margin-left: 10px;
|
||||
padding: 0;
|
||||
padding-left: 2px;
|
||||
border: 0;
|
||||
border: solid 1px transparent;
|
||||
border-radius: var(--button-border-radius);
|
||||
font-size: 0;
|
||||
}
|
||||
#copy-path::before {
|
||||
@@ -2159,7 +2284,7 @@ rustdoc-toolbar span.label {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
#settings-menu.rotate > a img {
|
||||
.settings-menu.rotate > a img {
|
||||
animation: rotating 2s linear infinite;
|
||||
}
|
||||
|
||||
@@ -2402,6 +2527,9 @@ However, it's not needed with smaller screen width because the doc/code block is
|
||||
opacity: 0.75;
|
||||
filter: var(--mobile-sidebar-menu-filter);
|
||||
}
|
||||
.src #sidebar-button > a:hover {
|
||||
background: var(--main-background-color);
|
||||
}
|
||||
.sidebar-menu-toggle:hover::before,
|
||||
.sidebar-menu-toggle:active::before,
|
||||
.sidebar-menu-toggle:focus::before {
|
||||
@@ -2410,8 +2538,8 @@ However, it's not needed with smaller screen width because the doc/code block is
|
||||
|
||||
/* Media Queries */
|
||||
|
||||
/* Make sure all the buttons line wrap at the same time */
|
||||
@media (max-width: 850px) {
|
||||
/* Make sure all the buttons line wrap at the same time */
|
||||
#search-tabs .count {
|
||||
display: block;
|
||||
}
|
||||
@@ -2421,6 +2549,81 @@ However, it's not needed with smaller screen width because the doc/code block is
|
||||
.side-by-side > div {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
/* Text label takes up too much space at this size. */
|
||||
.main-heading {
|
||||
grid-template-areas:
|
||||
"main-heading-breadcrumbs main-heading-toolbar"
|
||||
"main-heading-h1 main-heading-toolbar"
|
||||
"main-heading-sub-heading main-heading-toolbar";
|
||||
}
|
||||
.search-results-main-heading {
|
||||
display: grid;
|
||||
grid-template-areas:
|
||||
"main-heading-breadcrumbs main-heading-toolbar"
|
||||
"main-heading-breadcrumbs main-heading-toolbar"
|
||||
"main-heading-h1 main-heading-toolbar";
|
||||
}
|
||||
rustdoc-toolbar {
|
||||
margin-top: -10px;
|
||||
display: grid;
|
||||
grid-template-areas:
|
||||
"x settings help"
|
||||
"search summary summary";
|
||||
grid-template-rows: 35px 1fr;
|
||||
}
|
||||
.search-results-main-heading rustdoc-toolbar {
|
||||
display: grid;
|
||||
grid-template-areas:
|
||||
"settings help"
|
||||
"search search";
|
||||
}
|
||||
.search-results-main-heading #toggle-all-docs {
|
||||
display: none;
|
||||
}
|
||||
rustdoc-toolbar .settings-menu span.label,
|
||||
rustdoc-toolbar .help-menu span.label
|
||||
{
|
||||
display: none;
|
||||
}
|
||||
rustdoc-toolbar .settings-menu {
|
||||
grid-area: settings;
|
||||
}
|
||||
rustdoc-toolbar .help-menu {
|
||||
grid-area: help;
|
||||
}
|
||||
rustdoc-toolbar .settings-menu {
|
||||
grid-area: settings;
|
||||
}
|
||||
rustdoc-toolbar #search-button {
|
||||
grid-area: search;
|
||||
}
|
||||
rustdoc-toolbar #toggle-all-docs {
|
||||
grid-area: summary;
|
||||
}
|
||||
rustdoc-toolbar .settings-menu,
|
||||
rustdoc-toolbar .help-menu {
|
||||
height: 35px;
|
||||
}
|
||||
rustdoc-toolbar .settings-menu > a,
|
||||
rustdoc-toolbar .help-menu > a {
|
||||
border-radius: 2px;
|
||||
text-align: center;
|
||||
width: 34px;
|
||||
padding: 5px 0;
|
||||
}
|
||||
rustdoc-toolbar .settings-menu > a:before,
|
||||
rustdoc-toolbar .help-menu > a:before {
|
||||
margin: 0 4px;
|
||||
}
|
||||
#settings.popover {
|
||||
top: 16px;
|
||||
--popover-arrow-offset: 58px;
|
||||
}
|
||||
#help.popover {
|
||||
top: 16px;
|
||||
--popover-arrow-offset: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -2435,7 +2638,7 @@ in src-script.js and main.js
|
||||
|
||||
/* When linking to an item with an `id` (for instance, by clicking a link in the sidebar,
|
||||
or visiting a URL with a fragment like `#method.new`, we don't want the item to be obscured
|
||||
by the topbar. Anything with an `id` gets scroll-margin-top equal to .mobile-topbar's size.
|
||||
by the topbar. Anything with an `id` gets scroll-margin-top equal to rustdoc-topbar's size.
|
||||
*/
|
||||
*[id] {
|
||||
scroll-margin-top: 45px;
|
||||
@@ -2451,18 +2654,32 @@ in src-script.js and main.js
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
/* Text label takes up too much space at this size. */
|
||||
rustdoc-toolbar span.label {
|
||||
|
||||
/* Pull settings and help up into the top bar. */
|
||||
rustdoc-topbar span.label,
|
||||
html:not(.hide-sidebar) .rustdoc:not(.src) rustdoc-toolbar .settings-menu > a,
|
||||
html:not(.hide-sidebar) .rustdoc:not(.src) rustdoc-toolbar .help-menu > a
|
||||
{
|
||||
display: none;
|
||||
}
|
||||
#settings-menu > a, #help-button > a, button#toggle-all-docs {
|
||||
rustdoc-topbar .settings-menu > a,
|
||||
rustdoc-topbar .help-menu > a {
|
||||
width: 33px;
|
||||
line-height: 0;
|
||||
}
|
||||
rustdoc-topbar .settings-menu > a:hover,
|
||||
rustdoc-topbar .help-menu > a:hover {
|
||||
border: none;
|
||||
background: var(--main-background-color);
|
||||
border-radius: 0;
|
||||
}
|
||||
#settings.popover {
|
||||
--popover-arrow-offset: 86px;
|
||||
top: 32px;
|
||||
--popover-arrow-offset: 48px;
|
||||
}
|
||||
#help.popover {
|
||||
--popover-arrow-offset: 48px;
|
||||
top: 32px;
|
||||
--popover-arrow-offset: 12px;
|
||||
}
|
||||
|
||||
.rustdoc {
|
||||
@@ -2471,13 +2688,13 @@ in src-script.js and main.js
|
||||
display: block;
|
||||
}
|
||||
|
||||
main {
|
||||
html:not(.hide-sidebar) main {
|
||||
padding-left: 15px;
|
||||
padding-top: 0px;
|
||||
}
|
||||
|
||||
/* Hide the logo and item name from the sidebar. Those are displayed
|
||||
in the mobile-topbar instead. */
|
||||
in the rustdoc-topbar instead. */
|
||||
.sidebar .logo-container,
|
||||
.sidebar .location,
|
||||
.sidebar-resizer {
|
||||
@@ -2510,6 +2727,9 @@ in src-script.js and main.js
|
||||
height: 100vh;
|
||||
border: 0;
|
||||
}
|
||||
html .src main {
|
||||
padding: 18px 0;
|
||||
}
|
||||
.src .search-form {
|
||||
margin-left: 40px;
|
||||
}
|
||||
@@ -2529,9 +2749,9 @@ in src-script.js and main.js
|
||||
left: 0;
|
||||
}
|
||||
|
||||
.mobile-topbar h2 {
|
||||
rustdoc-topbar > h2 {
|
||||
padding-bottom: 0;
|
||||
margin: auto 0.5em auto auto;
|
||||
margin: auto;
|
||||
overflow: hidden;
|
||||
/* Rare exception to specifying font sizes in rem. Since the topbar
|
||||
height is specified in pixels, this also has to be specified in
|
||||
@@ -2540,32 +2760,34 @@ in src-script.js and main.js
|
||||
font-size: 24px;
|
||||
white-space: nowrap;
|
||||
text-overflow: ellipsis;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.mobile-topbar .logo-container > img {
|
||||
rustdoc-topbar .logo-container > img {
|
||||
max-width: 35px;
|
||||
max-height: 35px;
|
||||
margin: 5px 0 5px 20px;
|
||||
}
|
||||
|
||||
.mobile-topbar {
|
||||
rustdoc-topbar {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
position: sticky;
|
||||
z-index: 10;
|
||||
font-size: 2rem;
|
||||
height: 45px;
|
||||
width: 100%;
|
||||
left: 0;
|
||||
top: 0;
|
||||
}
|
||||
|
||||
.hide-sidebar .mobile-topbar {
|
||||
.hide-sidebar rustdoc-topbar {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.sidebar-menu-toggle {
|
||||
width: 45px;
|
||||
/* prevent flexbox shrinking */
|
||||
width: 41px;
|
||||
min-width: 41px;
|
||||
border: none;
|
||||
line-height: 0;
|
||||
}
|
||||
@@ -2591,9 +2813,13 @@ in src-script.js and main.js
|
||||
#sidebar-button > a::before {
|
||||
content: url('data:image/svg+xml,<svg xmlns="http://www.w3.org/2000/svg" \
|
||||
viewBox="0 0 22 22" fill="none" stroke="black">\
|
||||
<rect x="1" y="1" width="20" height="20" ry="1.5" stroke-width="1.5" stroke="%23777"/>\
|
||||
<circle cx="4.375" cy="4.375" r="1" stroke-width=".75"/>\
|
||||
<path d="m3 7.375h16m0-3h-4" stroke-width="1.25"/></svg>');
|
||||
<rect x="1" y="2" width="20" height="18" ry="1.5" stroke-width="1.5" stroke="%23777"/>\
|
||||
<g fill="black" stroke="none">\
|
||||
<circle cx="4.375" cy="5.375" r="1" stroke-width=".75"/>\
|
||||
<circle cx="17.375" cy="5.375" r="1" stroke-width=".75"/>\
|
||||
<circle cx="14.375" cy="5.375" r="1" stroke-width=".75"/>\
|
||||
</g>\
|
||||
<path d="m3 8.375h16" stroke-width="1.25"/></svg>');
|
||||
width: 22px;
|
||||
height: 22px;
|
||||
}
|
||||
@@ -3283,7 +3509,7 @@ Original by Dempfi (https://github.com/dempfi/ayu)
|
||||
border-bottom: 1px solid rgba(242, 151, 24, 0.3);
|
||||
}
|
||||
|
||||
:root[data-theme="ayu"] #settings-menu > a img,
|
||||
:root[data-theme="ayu"] .settings-menu > a img,
|
||||
:root[data-theme="ayu"] #sidebar-button > a::before {
|
||||
filter: invert(100);
|
||||
}
|
||||
|
||||
@@ -54,23 +54,6 @@ function showMain() {
|
||||
window.rootPath = getVar("root-path");
|
||||
window.currentCrate = getVar("current-crate");
|
||||
|
||||
function setMobileTopbar() {
|
||||
// FIXME: It would be nicer to generate this text content directly in HTML,
|
||||
// but with the current code it's hard to get the right information in the right place.
|
||||
const mobileTopbar = document.querySelector(".mobile-topbar");
|
||||
const locationTitle = document.querySelector(".sidebar h2.location");
|
||||
if (mobileTopbar) {
|
||||
const mobileTitle = document.createElement("h2");
|
||||
mobileTitle.className = "location";
|
||||
if (hasClass(document.querySelector(".rustdoc"), "crate")) {
|
||||
mobileTitle.innerHTML = `Crate <a href="#">${window.currentCrate}</a>`;
|
||||
} else if (locationTitle) {
|
||||
mobileTitle.innerHTML = locationTitle.innerHTML;
|
||||
}
|
||||
mobileTopbar.appendChild(mobileTitle);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the human-readable string for the virtual-key code of the
|
||||
* given KeyboardEvent, ev.
|
||||
@@ -84,6 +67,7 @@ function setMobileTopbar() {
|
||||
* So I guess you could say things are getting pretty interoperable.
|
||||
*
|
||||
* @param {KeyboardEvent} ev
|
||||
* @returns {string}
|
||||
*/
|
||||
function getVirtualKey(ev) {
|
||||
if ("key" in ev && typeof ev.key !== "undefined") {
|
||||
@@ -98,18 +82,8 @@ function getVirtualKey(ev) {
|
||||
}
|
||||
|
||||
const MAIN_ID = "main-content";
|
||||
const SETTINGS_BUTTON_ID = "settings-menu";
|
||||
const ALTERNATIVE_DISPLAY_ID = "alternative-display";
|
||||
const NOT_DISPLAYED_ID = "not-displayed";
|
||||
const HELP_BUTTON_ID = "help-button";
|
||||
|
||||
function getSettingsButton() {
|
||||
return document.getElementById(SETTINGS_BUTTON_ID);
|
||||
}
|
||||
|
||||
function getHelpButton() {
|
||||
return document.getElementById(HELP_BUTTON_ID);
|
||||
}
|
||||
|
||||
// Returns the current URL without any query parameter or hash.
|
||||
function getNakedUrl() {
|
||||
@@ -174,7 +148,7 @@ function getNotDisplayedElem() {
|
||||
* contains the displayed element (there can be only one at the same time!). So basically, we switch
|
||||
* elements between the two `<section>` elements.
|
||||
*
|
||||
* @param {HTMLElement|null} elemToDisplay
|
||||
* @param {Element|null} elemToDisplay
|
||||
*/
|
||||
function switchDisplayedElement(elemToDisplay) {
|
||||
const el = getAlternativeDisplayElem();
|
||||
@@ -239,14 +213,14 @@ function preLoadCss(cssUrl) {
|
||||
document.head.append(script);
|
||||
}
|
||||
|
||||
const settingsButton = getSettingsButton();
|
||||
if (settingsButton) {
|
||||
settingsButton.onclick = event => {
|
||||
onEachLazy(document.querySelectorAll(".settings-menu"), settingsMenu => {
|
||||
/** @param {MouseEvent} event */
|
||||
settingsMenu.querySelector("a").onclick = event => {
|
||||
if (event.ctrlKey || event.altKey || event.metaKey) {
|
||||
return;
|
||||
}
|
||||
window.hideAllModals(false);
|
||||
addClass(getSettingsButton(), "rotate");
|
||||
addClass(settingsMenu, "rotate");
|
||||
event.preventDefault();
|
||||
// Sending request for the CSS and the JS files at the same time so it will
|
||||
// hopefully be loaded when the JS will generate the settings content.
|
||||
@@ -268,15 +242,42 @@ function preLoadCss(cssUrl) {
|
||||
}
|
||||
}, 0);
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
window.searchState = {
|
||||
rustdocToolbar: document.querySelector("rustdoc-toolbar"),
|
||||
loadingText: "Loading search results...",
|
||||
// This will always be an HTMLInputElement, but tsc can't see that
|
||||
// @ts-expect-error
|
||||
input: document.getElementsByClassName("search-input")[0],
|
||||
outputElement: () => {
|
||||
inputElement: () => {
|
||||
let el = document.getElementsByClassName("search-input")[0];
|
||||
if (!el) {
|
||||
const out = nonnull(nonnull(window.searchState.outputElement()).parentElement);
|
||||
const hdr = document.createElement("div");
|
||||
hdr.className = "main-heading search-results-main-heading";
|
||||
const params = window.searchState.getQueryStringParams();
|
||||
const autofocusParam = params.search === "" ? "autofocus" : "";
|
||||
hdr.innerHTML = `<nav class="sub">
|
||||
<form class="search-form loading">
|
||||
<span></span> <!-- This empty span is a hacky fix for Safari: see #93184 -->
|
||||
<input
|
||||
${autofocusParam}
|
||||
class="search-input"
|
||||
name="search"
|
||||
aria-label="Run search in the documentation"
|
||||
autocomplete="off"
|
||||
spellcheck="false"
|
||||
placeholder="Type ‘S’ or ‘/’ to search, ‘?’ for more options…"
|
||||
type="search">
|
||||
</form>
|
||||
</nav><div class="search-switcher"></div>`;
|
||||
out.insertBefore(hdr, window.searchState.outputElement());
|
||||
el = document.getElementsByClassName("search-input")[0];
|
||||
}
|
||||
if (el instanceof HTMLInputElement) {
|
||||
return el;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
containerElement: () => {
|
||||
let el = document.getElementById("search");
|
||||
if (!el) {
|
||||
el = document.createElement("section");
|
||||
@@ -285,6 +286,19 @@ function preLoadCss(cssUrl) {
|
||||
}
|
||||
return el;
|
||||
},
|
||||
outputElement: () => {
|
||||
const container = window.searchState.containerElement();
|
||||
if (!container) {
|
||||
return null;
|
||||
}
|
||||
let el = container.querySelector(".search-out");
|
||||
if (!el) {
|
||||
el = document.createElement("div");
|
||||
el.className = "search-out";
|
||||
container.appendChild(el);
|
||||
}
|
||||
return el;
|
||||
},
|
||||
title: document.title,
|
||||
titleBeforeSearch: document.title,
|
||||
timeout: null,
|
||||
@@ -303,25 +317,52 @@ function preLoadCss(cssUrl) {
|
||||
}
|
||||
},
|
||||
isDisplayed: () => {
|
||||
const outputElement = window.searchState.outputElement();
|
||||
return !!outputElement &&
|
||||
!!outputElement.parentElement &&
|
||||
outputElement.parentElement.id === ALTERNATIVE_DISPLAY_ID;
|
||||
const container = window.searchState.containerElement();
|
||||
if (!container) {
|
||||
return false;
|
||||
}
|
||||
return !!container.parentElement && container.parentElement.id ===
|
||||
ALTERNATIVE_DISPLAY_ID;
|
||||
},
|
||||
// Sets the focus on the search bar at the top of the page
|
||||
focus: () => {
|
||||
window.searchState.input && window.searchState.input.focus();
|
||||
const inputElement = window.searchState.inputElement();
|
||||
window.searchState.showResults();
|
||||
if (inputElement) {
|
||||
inputElement.focus();
|
||||
// Avoid glitch if something focuses the search button after clicking.
|
||||
requestAnimationFrame(() => inputElement.focus());
|
||||
}
|
||||
},
|
||||
// Removes the focus from the search bar.
|
||||
defocus: () => {
|
||||
window.searchState.input && window.searchState.input.blur();
|
||||
nonnull(window.searchState.inputElement()).blur();
|
||||
},
|
||||
showResults: search => {
|
||||
if (search === null || typeof search === "undefined") {
|
||||
search = window.searchState.outputElement();
|
||||
toggle: () => {
|
||||
if (window.searchState.isDisplayed()) {
|
||||
window.searchState.defocus();
|
||||
window.searchState.hideResults();
|
||||
} else {
|
||||
window.searchState.focus();
|
||||
}
|
||||
switchDisplayedElement(search);
|
||||
},
|
||||
showResults: () => {
|
||||
document.title = window.searchState.title;
|
||||
if (window.searchState.isDisplayed()) {
|
||||
return;
|
||||
}
|
||||
const search = window.searchState.containerElement();
|
||||
switchDisplayedElement(search);
|
||||
const btn = document.querySelector("#search-button a");
|
||||
if (browserSupportsHistoryApi() && btn instanceof HTMLAnchorElement &&
|
||||
window.searchState.getQueryStringParams().search === undefined
|
||||
) {
|
||||
history.pushState(null, "", btn.href);
|
||||
}
|
||||
const btnLabel = document.querySelector("#search-button a span.label");
|
||||
if (btnLabel) {
|
||||
btnLabel.innerHTML = "Exit";
|
||||
}
|
||||
},
|
||||
removeQueryParameters: () => {
|
||||
// We change the document title.
|
||||
@@ -334,6 +375,10 @@ function preLoadCss(cssUrl) {
|
||||
switchDisplayedElement(null);
|
||||
// We also remove the query parameter from the URL.
|
||||
window.searchState.removeQueryParameters();
|
||||
const btnLabel = document.querySelector("#search-button a span.label");
|
||||
if (btnLabel) {
|
||||
btnLabel.innerHTML = "Search";
|
||||
}
|
||||
},
|
||||
getQueryStringParams: () => {
|
||||
/** @type {Object.<any, string>} */
|
||||
@@ -348,11 +393,11 @@ function preLoadCss(cssUrl) {
|
||||
return params;
|
||||
},
|
||||
setup: () => {
|
||||
const search_input = window.searchState.input;
|
||||
let searchLoaded = false;
|
||||
const search_input = window.searchState.inputElement();
|
||||
if (!search_input) {
|
||||
return;
|
||||
}
|
||||
let searchLoaded = false;
|
||||
// If you're browsing the nightly docs, the page might need to be refreshed for the
|
||||
// search to work because the hash of the JS scripts might have changed.
|
||||
function sendSearchForm() {
|
||||
@@ -363,21 +408,102 @@ function preLoadCss(cssUrl) {
|
||||
if (!searchLoaded) {
|
||||
searchLoaded = true;
|
||||
// @ts-expect-error
|
||||
loadScript(getVar("static-root-path") + getVar("search-js"), sendSearchForm);
|
||||
loadScript(resourcePath("search-index", ".js"), sendSearchForm);
|
||||
window.rr_ = data => {
|
||||
// @ts-expect-error
|
||||
window.searchIndex = data;
|
||||
};
|
||||
if (!window.StringdexOnload) {
|
||||
window.StringdexOnload = [];
|
||||
}
|
||||
window.StringdexOnload.push(() => {
|
||||
loadScript(
|
||||
// @ts-expect-error
|
||||
getVar("static-root-path") + getVar("search-js"),
|
||||
sendSearchForm,
|
||||
);
|
||||
});
|
||||
// @ts-expect-error
|
||||
loadScript(getVar("static-root-path") + getVar("stringdex-js"), sendSearchForm);
|
||||
loadScript(resourcePath("search.index/root", ".js"), sendSearchForm);
|
||||
}
|
||||
}
|
||||
|
||||
search_input.addEventListener("focus", () => {
|
||||
window.searchState.origPlaceholder = search_input.placeholder;
|
||||
search_input.placeholder = "Type your search here.";
|
||||
loadSearch();
|
||||
});
|
||||
|
||||
if (search_input.value !== "") {
|
||||
loadSearch();
|
||||
const btn = document.getElementById("search-button");
|
||||
if (btn) {
|
||||
btn.onclick = event => {
|
||||
if (event.ctrlKey || event.altKey || event.metaKey) {
|
||||
return;
|
||||
}
|
||||
event.preventDefault();
|
||||
window.searchState.toggle();
|
||||
loadSearch();
|
||||
};
|
||||
}
|
||||
|
||||
// Push and pop states are used to add search results to the browser
|
||||
// history.
|
||||
if (browserSupportsHistoryApi()) {
|
||||
// Store the previous <title> so we can revert back to it later.
|
||||
const previousTitle = document.title;
|
||||
|
||||
window.addEventListener("popstate", e => {
|
||||
const params = window.searchState.getQueryStringParams();
|
||||
// Revert to the previous title manually since the History
|
||||
// API ignores the title parameter.
|
||||
document.title = previousTitle;
|
||||
// Synchronize search bar with query string state and
|
||||
// perform the search. This will empty the bar if there's
|
||||
// nothing there, which lets you really go back to a
|
||||
// previous state with nothing in the bar.
|
||||
const inputElement = window.searchState.inputElement();
|
||||
if (params.search !== undefined && inputElement !== null) {
|
||||
loadSearch();
|
||||
inputElement.value = params.search;
|
||||
// Some browsers fire "onpopstate" for every page load
|
||||
// (Chrome), while others fire the event only when actually
|
||||
// popping a state (Firefox), which is why search() is
|
||||
// called both here and at the end of the startSearch()
|
||||
// function.
|
||||
e.preventDefault();
|
||||
window.searchState.showResults();
|
||||
if (params.search === "") {
|
||||
window.searchState.focus();
|
||||
}
|
||||
} else {
|
||||
// When browsing back from search results the main page
|
||||
// visibility must be reset.
|
||||
window.searchState.hideResults();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// This is required in firefox to avoid this problem: Navigating to a search result
|
||||
// with the keyboard, hitting enter, and then hitting back would take you back to
|
||||
// the doc page, rather than the search that should overlay it.
|
||||
// This was an interaction between the back-forward cache and our handlers
|
||||
// that try to sync state between the URL and the search input. To work around it,
|
||||
// do a small amount of re-init on page show.
|
||||
window.onpageshow = () => {
|
||||
const inputElement = window.searchState.inputElement();
|
||||
const qSearch = window.searchState.getQueryStringParams().search;
|
||||
if (qSearch !== undefined && inputElement !== null) {
|
||||
if (inputElement.value === "") {
|
||||
inputElement.value = qSearch;
|
||||
}
|
||||
window.searchState.showResults();
|
||||
if (qSearch === "") {
|
||||
loadSearch();
|
||||
window.searchState.focus();
|
||||
}
|
||||
} else {
|
||||
window.searchState.hideResults();
|
||||
}
|
||||
};
|
||||
|
||||
const params = window.searchState.getQueryStringParams();
|
||||
if (params.search !== undefined) {
|
||||
window.searchState.setLoadingSearch();
|
||||
@@ -386,13 +512,9 @@ function preLoadCss(cssUrl) {
|
||||
},
|
||||
setLoadingSearch: () => {
|
||||
const search = window.searchState.outputElement();
|
||||
if (!search) {
|
||||
return;
|
||||
}
|
||||
search.innerHTML = "<h3 class=\"search-loading\">" +
|
||||
window.searchState.loadingText +
|
||||
"</h3>";
|
||||
window.searchState.showResults(search);
|
||||
nonnull(search).innerHTML = "<h3 class=\"search-loading\">" +
|
||||
window.searchState.loadingText + "</h3>";
|
||||
window.searchState.showResults();
|
||||
},
|
||||
descShards: new Map(),
|
||||
loadDesc: async function({descShard, descIndex}) {
|
||||
@@ -1500,15 +1622,13 @@ function preLoadCss(cssUrl) {
|
||||
|
||||
// @ts-expect-error
|
||||
function helpBlurHandler(event) {
|
||||
// @ts-expect-error
|
||||
if (!getHelpButton().contains(document.activeElement) &&
|
||||
// @ts-expect-error
|
||||
!getHelpButton().contains(event.relatedTarget) &&
|
||||
// @ts-expect-error
|
||||
!getSettingsButton().contains(document.activeElement) &&
|
||||
// @ts-expect-error
|
||||
!getSettingsButton().contains(event.relatedTarget)
|
||||
) {
|
||||
const isInPopover = onEachLazy(
|
||||
document.querySelectorAll(".settings-menu, .help-menu"),
|
||||
menu => {
|
||||
return menu.contains(document.activeElement) || menu.contains(event.relatedTarget);
|
||||
},
|
||||
);
|
||||
if (!isInPopover) {
|
||||
window.hidePopoverMenus();
|
||||
}
|
||||
}
|
||||
@@ -1571,10 +1691,9 @@ function preLoadCss(cssUrl) {
|
||||
|
||||
const container = document.createElement("div");
|
||||
if (!isHelpPage) {
|
||||
container.className = "popover";
|
||||
container.className = "popover content";
|
||||
}
|
||||
container.id = "help";
|
||||
container.style.display = "none";
|
||||
|
||||
const side_by_side = document.createElement("div");
|
||||
side_by_side.className = "side-by-side";
|
||||
@@ -1590,17 +1709,16 @@ function preLoadCss(cssUrl) {
|
||||
help_section.appendChild(container);
|
||||
// @ts-expect-error
|
||||
document.getElementById("main-content").appendChild(help_section);
|
||||
container.style.display = "block";
|
||||
} else {
|
||||
const help_button = getHelpButton();
|
||||
// @ts-expect-error
|
||||
help_button.appendChild(container);
|
||||
|
||||
container.onblur = helpBlurHandler;
|
||||
// @ts-expect-error
|
||||
help_button.onblur = helpBlurHandler;
|
||||
// @ts-expect-error
|
||||
help_button.children[0].onblur = helpBlurHandler;
|
||||
onEachLazy(document.getElementsByClassName("help-menu"), menu => {
|
||||
if (menu.offsetWidth !== 0) {
|
||||
menu.appendChild(container);
|
||||
container.onblur = helpBlurHandler;
|
||||
menu.onblur = helpBlurHandler;
|
||||
menu.children[0].onblur = helpBlurHandler;
|
||||
return true;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return container;
|
||||
@@ -1621,80 +1739,57 @@ function preLoadCss(cssUrl) {
|
||||
* Hide all the popover menus.
|
||||
*/
|
||||
window.hidePopoverMenus = () => {
|
||||
onEachLazy(document.querySelectorAll("rustdoc-toolbar .popover"), elem => {
|
||||
onEachLazy(document.querySelectorAll(".settings-menu .popover"), elem => {
|
||||
elem.style.display = "none";
|
||||
});
|
||||
const button = getHelpButton();
|
||||
if (button) {
|
||||
removeClass(button, "help-open");
|
||||
}
|
||||
onEachLazy(document.querySelectorAll(".help-menu .popover"), elem => {
|
||||
elem.parentElement.removeChild(elem);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the help menu element (not the button).
|
||||
*
|
||||
* @param {boolean} buildNeeded - If this argument is `false`, the help menu element won't be
|
||||
* built if it doesn't exist.
|
||||
*
|
||||
* @return {HTMLElement}
|
||||
*/
|
||||
function getHelpMenu(buildNeeded) {
|
||||
// @ts-expect-error
|
||||
let menu = getHelpButton().querySelector(".popover");
|
||||
if (!menu && buildNeeded) {
|
||||
menu = buildHelpMenu();
|
||||
}
|
||||
// @ts-expect-error
|
||||
return menu;
|
||||
}
|
||||
|
||||
/**
|
||||
* Show the help popup menu.
|
||||
*/
|
||||
function showHelp() {
|
||||
window.hideAllModals(false);
|
||||
// Prevent `blur` events from being dispatched as a result of closing
|
||||
// other modals.
|
||||
const button = getHelpButton();
|
||||
addClass(button, "help-open");
|
||||
// @ts-expect-error
|
||||
button.querySelector("a").focus();
|
||||
const menu = getHelpMenu(true);
|
||||
if (menu.style.display === "none") {
|
||||
// @ts-expect-error
|
||||
window.hideAllModals();
|
||||
menu.style.display = "";
|
||||
}
|
||||
onEachLazy(document.querySelectorAll(".help-menu a"), menu => {
|
||||
if (menu.offsetWidth !== 0) {
|
||||
menu.focus();
|
||||
return true;
|
||||
}
|
||||
});
|
||||
buildHelpMenu();
|
||||
}
|
||||
|
||||
const helpLink = document.querySelector(`#${HELP_BUTTON_ID} > a`);
|
||||
if (isHelpPage) {
|
||||
buildHelpMenu();
|
||||
} else if (helpLink) {
|
||||
helpLink.addEventListener("click", event => {
|
||||
// By default, have help button open docs in a popover.
|
||||
// If user clicks with a moderator, though, use default browser behavior,
|
||||
// probably opening in a new window or tab.
|
||||
if (!helpLink.contains(helpLink) ||
|
||||
// @ts-expect-error
|
||||
event.ctrlKey ||
|
||||
// @ts-expect-error
|
||||
event.altKey ||
|
||||
// @ts-expect-error
|
||||
event.metaKey) {
|
||||
return;
|
||||
}
|
||||
event.preventDefault();
|
||||
const menu = getHelpMenu(true);
|
||||
const shouldShowHelp = menu.style.display === "none";
|
||||
if (shouldShowHelp) {
|
||||
showHelp();
|
||||
} else {
|
||||
window.hidePopoverMenus();
|
||||
}
|
||||
} else {
|
||||
onEachLazy(document.querySelectorAll(".help-menu > a"), helpLink => {
|
||||
helpLink.addEventListener(
|
||||
"click",
|
||||
/** @param {MouseEvent} event */
|
||||
event => {
|
||||
// By default, have help button open docs in a popover.
|
||||
// If user clicks with a moderator, though, use default browser behavior,
|
||||
// probably opening in a new window or tab.
|
||||
if (event.ctrlKey ||
|
||||
event.altKey ||
|
||||
event.metaKey) {
|
||||
return;
|
||||
}
|
||||
event.preventDefault();
|
||||
if (document.getElementById("help")) {
|
||||
window.hidePopoverMenus();
|
||||
} else {
|
||||
showHelp();
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
setMobileTopbar();
|
||||
addSidebarItems();
|
||||
addSidebarCrates();
|
||||
onHashChange(null);
|
||||
@@ -1746,7 +1841,15 @@ function preLoadCss(cssUrl) {
|
||||
// On larger, "desktop-sized" viewports (though that includes many
|
||||
// tablets), it's fixed-position, appears in the left side margin,
|
||||
// and it can be activated by resizing the sidebar into nothing.
|
||||
const sidebarButton = document.getElementById("sidebar-button");
|
||||
let sidebarButton = document.getElementById("sidebar-button");
|
||||
const body = document.querySelector(".main-heading");
|
||||
if (!sidebarButton && body) {
|
||||
sidebarButton = document.createElement("div");
|
||||
sidebarButton.id = "sidebar-button";
|
||||
const path = `${window.rootPath}${window.currentCrate}/all.html`;
|
||||
sidebarButton.innerHTML = `<a href="${path}" title="show sidebar"></a>`;
|
||||
body.insertBefore(sidebarButton, body.firstChild);
|
||||
}
|
||||
if (sidebarButton) {
|
||||
sidebarButton.addEventListener("click", e => {
|
||||
removeClass(document.documentElement, "hide-sidebar");
|
||||
|
||||
+134
-118
@@ -2,6 +2,8 @@
|
||||
// not put into the JavaScript we include as part of the documentation. It is used for
|
||||
// type checking. See README.md in this directory for more info.
|
||||
|
||||
import { RoaringBitmap } from "./stringdex";
|
||||
|
||||
/* eslint-disable */
|
||||
declare global {
|
||||
/** Search engine data used by main.js and search.js */
|
||||
@@ -10,6 +12,17 @@ declare global {
|
||||
declare function nonnull(x: T|null, msg: string|undefined);
|
||||
/** Defined and documented in `storage.js` */
|
||||
declare function nonundef(x: T|undefined, msg: string|undefined);
|
||||
interface PromiseConstructor {
|
||||
/**
|
||||
* Polyfill
|
||||
* @template T
|
||||
*/
|
||||
withResolvers: function(): {
|
||||
"promise": Promise<T>,
|
||||
"resolve": (function(T): void),
|
||||
"reject": (function(any): void)
|
||||
};
|
||||
}
|
||||
interface Window {
|
||||
/** Make the current theme easy to find */
|
||||
currentTheme: HTMLLinkElement|null;
|
||||
@@ -95,29 +108,28 @@ declare namespace rustdoc {
|
||||
interface SearchState {
|
||||
rustdocToolbar: HTMLElement|null;
|
||||
loadingText: string;
|
||||
input: HTMLInputElement|null;
|
||||
inputElement: function(): HTMLInputElement|null;
|
||||
containerElement: function(): Element|null;
|
||||
title: string;
|
||||
titleBeforeSearch: string;
|
||||
timeout: number|null;
|
||||
timeout: ReturnType<typeof setTimeout>|null;
|
||||
currentTab: number;
|
||||
focusedByTab: [number|null, number|null, number|null];
|
||||
focusedByTab: [Element|null, Element|null, Element|null];
|
||||
clearInputTimeout: function;
|
||||
outputElement(): HTMLElement|null;
|
||||
focus();
|
||||
defocus();
|
||||
// note: an optional param is not the same as
|
||||
// a nullable/undef-able param.
|
||||
showResults(elem?: HTMLElement|null);
|
||||
removeQueryParameters();
|
||||
hideResults();
|
||||
getQueryStringParams(): Object.<any, string>;
|
||||
origPlaceholder: string;
|
||||
outputElement: function(): Element|null;
|
||||
focus: function();
|
||||
defocus: function();
|
||||
toggle: function();
|
||||
showResults: function();
|
||||
removeQueryParameters: function();
|
||||
hideResults: function();
|
||||
getQueryStringParams: function(): Object.<any, string>;
|
||||
setup: function();
|
||||
setLoadingSearch();
|
||||
descShards: Map<string, SearchDescShard[]>;
|
||||
loadDesc: function({descShard: SearchDescShard, descIndex: number}): Promise<string|null>;
|
||||
loadedDescShard(string, number, string);
|
||||
isDisplayed(): boolean,
|
||||
loadedDescShard: function(string, number, string);
|
||||
isDisplayed: function(): boolean;
|
||||
}
|
||||
|
||||
interface SearchDescShard {
|
||||
@@ -131,12 +143,13 @@ declare namespace rustdoc {
|
||||
* A single parsed "atom" in a search query. For example,
|
||||
*
|
||||
* std::fmt::Formatter, Write -> Result<()>
|
||||
* ┏━━━━━━━━━━━━━━━━━━ ┌──── ┏━━━━━┅┅┅┅┄┄┄┄┄┄┄┄┄┄┄┄┄┄┐
|
||||
* ┃ │ ┗ QueryElement { ┊
|
||||
* ┃ │ name: Result ┊
|
||||
* ┃ │ generics: [ ┊
|
||||
* ┃ │ QueryElement ┘
|
||||
* ┃ │ name: ()
|
||||
* ┏━━━━━━━━━━━━━━━━━━ ┌──── ┏━━━━━┅┅┅┅┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┐
|
||||
* ┃ │ ┗ QueryElement { ┊
|
||||
* ┃ │ name: Result ┊
|
||||
* ┃ │ generics: [ ┊
|
||||
* ┃ │ QueryElement { ┘
|
||||
* ┃ │ name: ()
|
||||
* ┃ │ }
|
||||
* ┃ │ ]
|
||||
* ┃ │ }
|
||||
* ┃ └ QueryElement {
|
||||
@@ -156,14 +169,14 @@ declare namespace rustdoc {
|
||||
normalizedPathLast: string,
|
||||
generics: Array<QueryElement>,
|
||||
bindings: Map<number, Array<QueryElement>>,
|
||||
typeFilter: number|null,
|
||||
typeFilter: number,
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as QueryElement, but bindings and typeFilter support strings
|
||||
*/
|
||||
interface ParserQueryElement {
|
||||
name: string|null,
|
||||
name: string,
|
||||
id: number|null,
|
||||
fullPath: Array<string>,
|
||||
pathWithoutLast: Array<string>,
|
||||
@@ -172,7 +185,7 @@ declare namespace rustdoc {
|
||||
generics: Array<ParserQueryElement>,
|
||||
bindings: Map<string, Array<ParserQueryElement>>,
|
||||
bindingName: {name: string|null, generics: ParserQueryElement[]}|null,
|
||||
typeFilter: number|string|null,
|
||||
typeFilter: string|null,
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -215,35 +228,74 @@ declare namespace rustdoc {
|
||||
/**
|
||||
* An entry in the search index database.
|
||||
*/
|
||||
interface EntryData {
|
||||
krate: number,
|
||||
ty: ItemType,
|
||||
modulePath: number?,
|
||||
exactModulePath: number?,
|
||||
parent: number?,
|
||||
deprecated: boolean,
|
||||
associatedItemDisambiguator: string?,
|
||||
}
|
||||
|
||||
/**
|
||||
* A path in the search index database
|
||||
*/
|
||||
interface PathData {
|
||||
ty: ItemType,
|
||||
modulePath: string,
|
||||
exactModulePath: string?,
|
||||
}
|
||||
|
||||
/**
|
||||
* A function signature in the search index database
|
||||
*
|
||||
* Note that some non-function items (eg. constants, struct fields) have a function signature so they can appear in type-based search.
|
||||
*/
|
||||
interface FunctionData {
|
||||
functionSignature: FunctionSearchType|null,
|
||||
paramNames: string[],
|
||||
elemCount: number,
|
||||
}
|
||||
|
||||
/**
|
||||
* A function signature in the search index database
|
||||
*/
|
||||
interface TypeData {
|
||||
searchUnbox: boolean,
|
||||
invertedFunctionSignatureIndex: RoaringBitmap[],
|
||||
}
|
||||
|
||||
/**
|
||||
* A search entry of some sort.
|
||||
*/
|
||||
interface Row {
|
||||
crate: string,
|
||||
descShard: SearchDescShard,
|
||||
id: number,
|
||||
// This is the name of the item. For doc aliases, if you want the name of the aliased
|
||||
// item, take a look at `Row.original.name`.
|
||||
crate: string,
|
||||
ty: ItemType,
|
||||
name: string,
|
||||
normalizedName: string,
|
||||
word: string,
|
||||
paramNames: string[],
|
||||
parent: ({ty: number, name: string, path: string, exactPath: string}|null|undefined),
|
||||
path: string,
|
||||
ty: number,
|
||||
type: FunctionSearchType | null,
|
||||
descIndex: number,
|
||||
bitIndex: number,
|
||||
implDisambiguator: String | null,
|
||||
is_alias?: boolean,
|
||||
original?: Row,
|
||||
modulePath: string,
|
||||
exactModulePath: string,
|
||||
entry: EntryData?,
|
||||
path: PathData?,
|
||||
type: FunctionData?,
|
||||
deprecated: boolean,
|
||||
parent: { path: PathData, name: string}?,
|
||||
}
|
||||
|
||||
type ItemType = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 |
|
||||
11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 |
|
||||
21 | 22 | 23 | 24 | 25 | 26;
|
||||
|
||||
/**
|
||||
* The viewmodel for the search engine results page.
|
||||
*/
|
||||
interface ResultsTable {
|
||||
in_args: Array<ResultObject>,
|
||||
returned: Array<ResultObject>,
|
||||
others: Array<ResultObject>,
|
||||
query: ParsedQuery,
|
||||
in_args: AsyncGenerator<ResultObject>,
|
||||
returned: AsyncGenerator<ResultObject>,
|
||||
others: AsyncGenerator<ResultObject>,
|
||||
query: ParsedQuery<rustdoc.ParserQueryElement>,
|
||||
}
|
||||
|
||||
type Results = { max_dist?: number } & Map<number, ResultObject>
|
||||
@@ -252,25 +304,41 @@ declare namespace rustdoc {
|
||||
* An annotated `Row`, used in the viewmodel.
|
||||
*/
|
||||
interface ResultObject {
|
||||
desc: string,
|
||||
desc: Promise<string|null>,
|
||||
displayPath: string,
|
||||
fullPath: string,
|
||||
href: string,
|
||||
id: number,
|
||||
dist: number,
|
||||
path_dist: number,
|
||||
name: string,
|
||||
normalizedName: string,
|
||||
word: string,
|
||||
index: number,
|
||||
parent: (Object|undefined),
|
||||
path: string,
|
||||
ty: number,
|
||||
parent: ({
|
||||
path: string,
|
||||
exactPath: string,
|
||||
name: string,
|
||||
ty: number,
|
||||
}|undefined),
|
||||
type?: FunctionSearchType,
|
||||
paramNames?: string[],
|
||||
displayTypeSignature: Promise<rustdoc.DisplayTypeSignature> | null,
|
||||
item: Row,
|
||||
dontValidate?: boolean,
|
||||
is_alias: boolean,
|
||||
alias?: string,
|
||||
}
|
||||
|
||||
/**
|
||||
* An annotated `Row`, used in the viewmodel.
|
||||
*/
|
||||
interface PlainResultObject {
|
||||
id: number,
|
||||
dist: number,
|
||||
path_dist: number,
|
||||
index: number,
|
||||
elems: rustdoc.QueryElement[],
|
||||
returned: rustdoc.QueryElement[],
|
||||
is_alias: boolean,
|
||||
alias?: string,
|
||||
original?: rustdoc.Rlow,
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -364,7 +432,19 @@ declare namespace rustdoc {
|
||||
* Numeric IDs are *ONE-indexed* into the paths array (`p`). Zero is used as a sentinel for `null`
|
||||
* because `null` is four bytes while `0` is one byte.
|
||||
*/
|
||||
type RawFunctionType = number | [number, Array<RawFunctionType>];
|
||||
type RawFunctionType = number | [number, Array<RawFunctionType>] | [number, Array<RawFunctionType>, Array<[RawFunctionType, RawFunctionType[]]>];
|
||||
|
||||
/**
|
||||
* Utility typedef for deserializing compact JSON.
|
||||
*
|
||||
* R is the required part, O is the optional part, which goes afterward.
|
||||
* For example, `ArrayWithOptionals<[A, B], [C, D]>` matches
|
||||
* `[A, B] | [A, B, C] | [A, B, C, D]`.
|
||||
*/
|
||||
type ArrayWithOptionals<R extends any[], O extends any[]> =
|
||||
O extends [infer First, ...infer Rest] ?
|
||||
R | ArrayWithOptionals<[...R, First], Rest> :
|
||||
R;
|
||||
|
||||
/**
|
||||
* The type signature entry in the decoded search index.
|
||||
@@ -382,8 +462,8 @@ declare namespace rustdoc {
|
||||
*/
|
||||
interface FunctionType {
|
||||
id: null|number,
|
||||
ty: number|null,
|
||||
name?: string,
|
||||
ty: ItemType,
|
||||
name: string|null,
|
||||
path: string|null,
|
||||
exactPath: string|null,
|
||||
unboxFlag: boolean,
|
||||
@@ -403,70 +483,6 @@ declare namespace rustdoc {
|
||||
bindings: Map<number, FingerprintableType[]>;
|
||||
};
|
||||
|
||||
/**
|
||||
* The raw search data for a given crate. `n`, `t`, `d`, `i`, and `f`
|
||||
* are arrays with the same length. `q`, `a`, and `c` use a sparse
|
||||
* representation for compactness.
|
||||
*
|
||||
* `n[i]` contains the name of an item.
|
||||
*
|
||||
* `t[i]` contains the type of that item
|
||||
* (as a string of characters that represent an offset in `itemTypes`).
|
||||
*
|
||||
* `d[i]` contains the description of that item.
|
||||
*
|
||||
* `q` contains the full paths of the items. For compactness, it is a set of
|
||||
* (index, path) pairs used to create a map. If a given index `i` is
|
||||
* not present, this indicates "same as the last index present".
|
||||
*
|
||||
* `i[i]` contains an item's parent, usually a module. For compactness,
|
||||
* it is a set of indexes into the `p` array.
|
||||
*
|
||||
* `f` contains function signatures, or `0` if the item isn't a function.
|
||||
* More information on how they're encoded can be found in rustc-dev-guide
|
||||
*
|
||||
* Functions are themselves encoded as arrays. The first item is a list of
|
||||
* types representing the function's inputs, and the second list item is a list
|
||||
* of types representing the function's output. Tuples are flattened.
|
||||
* Types are also represented as arrays; the first item is an index into the `p`
|
||||
* array, while the second is a list of types representing any generic parameters.
|
||||
*
|
||||
* b[i] contains an item's impl disambiguator. This is only present if an item
|
||||
* is defined in an impl block and, the impl block's type has more than one associated
|
||||
* item with the same name.
|
||||
*
|
||||
* `a` defines aliases with an Array of pairs: [name, offset], where `offset`
|
||||
* points into the n/t/d/q/i/f arrays.
|
||||
*
|
||||
* `doc` contains the description of the crate.
|
||||
*
|
||||
* `p` is a list of path/type pairs. It is used for parents and function parameters.
|
||||
* The first item is the type, the second is the name, the third is the visible path (if any) and
|
||||
* the fourth is the canonical path used for deduplication (if any).
|
||||
*
|
||||
* `r` is the canonical path used for deduplication of re-exported items.
|
||||
* It is not used for associated items like methods (that's the fourth element
|
||||
* of `p`) but is used for modules items like free functions.
|
||||
*
|
||||
* `c` is an array of item indices that are deprecated.
|
||||
*/
|
||||
type RawSearchIndexCrate = {
|
||||
doc: string,
|
||||
a: { [key: string]: number[] },
|
||||
n: Array<string>,
|
||||
t: string,
|
||||
D: string,
|
||||
e: string,
|
||||
q: Array<[number, string]>,
|
||||
i: string,
|
||||
f: string,
|
||||
p: Array<[number, string] | [number, string, number] | [number, string, number, number] | [number, string, number, number, string]>,
|
||||
b: Array<[number, String]>,
|
||||
c: string,
|
||||
r: Array<[number, number]>,
|
||||
P: Array<[number, string]>,
|
||||
};
|
||||
|
||||
type VlqData = VlqData[] | number;
|
||||
|
||||
/**
|
||||
|
||||
+2315
-2928
@@ -1,9 +1,16 @@
|
||||
// ignore-tidy-filelength
|
||||
/* global addClass, getNakedUrl, getSettingValue, getVar */
|
||||
/* global onEachLazy, removeClass, searchState, browserSupportsHistoryApi, exports */
|
||||
/* global addClass, getNakedUrl, getVar, nonnull, getSettingValue */
|
||||
/* global onEachLazy, removeClass, searchState, browserSupportsHistoryApi */
|
||||
|
||||
"use strict";
|
||||
|
||||
/**
|
||||
* @param {stringdex.Stringdex} Stringdex
|
||||
* @param {typeof stringdex.RoaringBitmap} RoaringBitmap
|
||||
* @param {stringdex.Hooks} hooks
|
||||
*/
|
||||
const initSearch = async function(Stringdex, RoaringBitmap, hooks) {
|
||||
|
||||
// polyfill
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/toSpliced
|
||||
if (!Array.prototype.toSpliced) {
|
||||
@@ -20,31 +27,65 @@ if (!Array.prototype.toSpliced) {
|
||||
*
|
||||
* @template T
|
||||
* @param {Iterable<T>} arr
|
||||
* @param {function(T): any} func
|
||||
* @param {function(T): Promise<any>} func
|
||||
* @param {function(T): boolean} funcBtwn
|
||||
*/
|
||||
function onEachBtwn(arr, func, funcBtwn) {
|
||||
async function onEachBtwnAsync(arr, func, funcBtwn) {
|
||||
let skipped = true;
|
||||
for (const value of arr) {
|
||||
if (!skipped) {
|
||||
funcBtwn(value);
|
||||
}
|
||||
skipped = func(value);
|
||||
skipped = await func(value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert any `undefined` to `null`.
|
||||
*
|
||||
* @template T
|
||||
* @param {T|undefined} x
|
||||
* @returns {T|null}
|
||||
* Allow the browser to redraw.
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
function undef2null(x) {
|
||||
if (x !== undefined) {
|
||||
return x;
|
||||
}
|
||||
return null;
|
||||
const yieldToBrowser = typeof window !== "undefined" && window.requestIdleCallback ?
|
||||
function() {
|
||||
return new Promise((resolve, _reject) => {
|
||||
window.requestIdleCallback(resolve);
|
||||
});
|
||||
} :
|
||||
function() {
|
||||
return new Promise((resolve, _reject) => {
|
||||
setTimeout(resolve, 0);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Promise-based timer wrapper.
|
||||
* @param {number} ms
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
const timeout = function(ms) {
|
||||
return new Promise((resolve, _reject) => {
|
||||
setTimeout(resolve, ms);
|
||||
});
|
||||
};
|
||||
|
||||
if (!Promise.withResolvers) {
|
||||
/**
|
||||
* Polyfill
|
||||
* @template T
|
||||
* @returns {{
|
||||
"promise": Promise<T>,
|
||||
"resolve": (function(T): void),
|
||||
"reject": (function(any): void)
|
||||
}}
|
||||
*/
|
||||
Promise.withResolvers = () => {
|
||||
let resolve, reject;
|
||||
const promise = new Promise((res, rej) => {
|
||||
resolve = res;
|
||||
reject = rej;
|
||||
});
|
||||
// @ts-expect-error
|
||||
return {promise, resolve, reject};
|
||||
};
|
||||
}
|
||||
|
||||
// ==================== Core search logic begin ====================
|
||||
@@ -81,13 +122,22 @@ const itemTypes = [
|
||||
];
|
||||
|
||||
// used for special search precedence
|
||||
const TY_PRIMITIVE = itemTypes.indexOf("primitive");
|
||||
const TY_GENERIC = itemTypes.indexOf("generic");
|
||||
const TY_IMPORT = itemTypes.indexOf("import");
|
||||
const TY_TRAIT = itemTypes.indexOf("trait");
|
||||
const TY_FN = itemTypes.indexOf("fn");
|
||||
const TY_METHOD = itemTypes.indexOf("method");
|
||||
const TY_TYMETHOD = itemTypes.indexOf("tymethod");
|
||||
/** @type {rustdoc.ItemType} */
|
||||
const TY_PRIMITIVE = 1;
|
||||
/** @type {rustdoc.ItemType} */
|
||||
const TY_GENERIC = 26;
|
||||
/** @type {rustdoc.ItemType} */
|
||||
const TY_IMPORT = 4;
|
||||
/** @type {rustdoc.ItemType} */
|
||||
const TY_TRAIT = 10;
|
||||
/** @type {rustdoc.ItemType} */
|
||||
const TY_FN = 7;
|
||||
/** @type {rustdoc.ItemType} */
|
||||
const TY_METHOD = 13;
|
||||
/** @type {rustdoc.ItemType} */
|
||||
const TY_TYMETHOD = 12;
|
||||
/** @type {rustdoc.ItemType} */
|
||||
const TY_ASSOCTYPE = 17;
|
||||
const ROOT_PATH = typeof window !== "undefined" ? window.rootPath : "../";
|
||||
|
||||
// Hard limit on how deep to recurse into generics when doing type-driven search.
|
||||
@@ -242,7 +292,9 @@ function isEndCharacter(c) {
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} ty
|
||||
* Same thing as ItemType::is_fn_like in item_type.rs
|
||||
*
|
||||
* @param {rustdoc.ItemType} ty
|
||||
* @returns
|
||||
*/
|
||||
function isFnLikeTy(ty) {
|
||||
@@ -1023,6 +1075,7 @@ class VlqHexDecoder {
|
||||
this.string = string;
|
||||
this.cons = cons;
|
||||
this.offset = 0;
|
||||
this.elemCount = 0;
|
||||
/** @type {T[]} */
|
||||
this.backrefQueue = [];
|
||||
}
|
||||
@@ -1060,6 +1113,7 @@ class VlqHexDecoder {
|
||||
n = (n << 4) | (c & 0xF);
|
||||
const [sign, value] = [n & 1, n >> 1];
|
||||
this.offset += 1;
|
||||
this.elemCount += 1;
|
||||
return sign ? -value : value;
|
||||
}
|
||||
/**
|
||||
@@ -1086,1247 +1140,138 @@ class VlqHexDecoder {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
class RoaringBitmap {
|
||||
/** @param {string} str */
|
||||
constructor(str) {
|
||||
// https://github.com/RoaringBitmap/RoaringFormatSpec
|
||||
//
|
||||
// Roaring bitmaps are used for flags that can be kept in their
|
||||
// compressed form, even when loaded into memory. This decoder
|
||||
// turns the containers into objects, but uses byte array
|
||||
// slices of the original format for the data payload.
|
||||
const strdecoded = atob(str);
|
||||
const u8array = new Uint8Array(strdecoded.length);
|
||||
for (let j = 0; j < strdecoded.length; ++j) {
|
||||
u8array[j] = strdecoded.charCodeAt(j);
|
||||
}
|
||||
const has_runs = u8array[0] === 0x3b;
|
||||
const size = has_runs ?
|
||||
((u8array[2] | (u8array[3] << 8)) + 1) :
|
||||
((u8array[4] | (u8array[5] << 8) | (u8array[6] << 16) | (u8array[7] << 24)));
|
||||
let i = has_runs ? 4 : 8;
|
||||
let is_run;
|
||||
if (has_runs) {
|
||||
const is_run_len = Math.floor((size + 7) / 8);
|
||||
is_run = u8array.slice(i, i + is_run_len);
|
||||
i += is_run_len;
|
||||
} else {
|
||||
is_run = new Uint8Array();
|
||||
}
|
||||
this.keys = [];
|
||||
this.cardinalities = [];
|
||||
for (let j = 0; j < size; ++j) {
|
||||
this.keys.push(u8array[i] | (u8array[i + 1] << 8));
|
||||
i += 2;
|
||||
this.cardinalities.push((u8array[i] | (u8array[i + 1] << 8)) + 1);
|
||||
i += 2;
|
||||
}
|
||||
this.containers = [];
|
||||
let offsets = null;
|
||||
if (!has_runs || this.keys.length >= 4) {
|
||||
offsets = [];
|
||||
for (let j = 0; j < size; ++j) {
|
||||
offsets.push(u8array[i] | (u8array[i + 1] << 8) | (u8array[i + 2] << 16) |
|
||||
(u8array[i + 3] << 24));
|
||||
i += 4;
|
||||
}
|
||||
}
|
||||
for (let j = 0; j < size; ++j) {
|
||||
if (offsets && offsets[j] !== i) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(this.containers);
|
||||
throw new Error(`corrupt bitmap ${j}: ${i} / ${offsets[j]}`);
|
||||
}
|
||||
if (is_run[j >> 3] & (1 << (j & 0x7))) {
|
||||
const runcount = (u8array[i] | (u8array[i + 1] << 8));
|
||||
i += 2;
|
||||
this.containers.push(new RoaringBitmapRun(
|
||||
runcount,
|
||||
u8array.slice(i, i + (runcount * 4)),
|
||||
));
|
||||
i += runcount * 4;
|
||||
} else if (this.cardinalities[j] >= 4096) {
|
||||
this.containers.push(new RoaringBitmapBits(u8array.slice(i, i + 8192)));
|
||||
i += 8192;
|
||||
} else {
|
||||
const end = this.cardinalities[j] * 2;
|
||||
this.containers.push(new RoaringBitmapArray(
|
||||
this.cardinalities[j],
|
||||
u8array.slice(i, i + end),
|
||||
));
|
||||
i += end;
|
||||
}
|
||||
}
|
||||
}
|
||||
/** @param {number} keyvalue */
|
||||
contains(keyvalue) {
|
||||
const key = keyvalue >> 16;
|
||||
const value = keyvalue & 0xFFFF;
|
||||
// Binary search algorithm copied from
|
||||
// https://en.wikipedia.org/wiki/Binary_search#Procedure
|
||||
//
|
||||
// Format is required by specification to be sorted.
|
||||
// Because keys are 16 bits and unique, length can't be
|
||||
// bigger than 2**16, and because we have 32 bits of safe int,
|
||||
// left + right can't overflow.
|
||||
let left = 0;
|
||||
let right = this.keys.length - 1;
|
||||
while (left <= right) {
|
||||
const mid = Math.floor((left + right) / 2);
|
||||
const x = this.keys[mid];
|
||||
if (x < key) {
|
||||
left = mid + 1;
|
||||
} else if (x > key) {
|
||||
right = mid - 1;
|
||||
} else {
|
||||
return this.containers[mid].contains(value);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
class RoaringBitmapRun {
|
||||
/**
|
||||
* @param {number} runcount
|
||||
* @param {Uint8Array} array
|
||||
*/
|
||||
constructor(runcount, array) {
|
||||
this.runcount = runcount;
|
||||
this.array = array;
|
||||
}
|
||||
/** @param {number} value */
|
||||
contains(value) {
|
||||
// Binary search algorithm copied from
|
||||
// https://en.wikipedia.org/wiki/Binary_search#Procedure
|
||||
//
|
||||
// Since runcount is stored as 16 bits, left + right
|
||||
// can't overflow.
|
||||
let left = 0;
|
||||
let right = this.runcount - 1;
|
||||
while (left <= right) {
|
||||
const mid = Math.floor((left + right) / 2);
|
||||
const i = mid * 4;
|
||||
const start = this.array[i] | (this.array[i + 1] << 8);
|
||||
const lenm1 = this.array[i + 2] | (this.array[i + 3] << 8);
|
||||
if ((start + lenm1) < value) {
|
||||
left = mid + 1;
|
||||
} else if (start > value) {
|
||||
right = mid - 1;
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
class RoaringBitmapArray {
|
||||
/**
|
||||
* @param {number} cardinality
|
||||
* @param {Uint8Array} array
|
||||
*/
|
||||
constructor(cardinality, array) {
|
||||
this.cardinality = cardinality;
|
||||
this.array = array;
|
||||
}
|
||||
/** @param {number} value */
|
||||
contains(value) {
|
||||
// Binary search algorithm copied from
|
||||
// https://en.wikipedia.org/wiki/Binary_search#Procedure
|
||||
//
|
||||
// Since cardinality can't be higher than 4096, left + right
|
||||
// cannot overflow.
|
||||
let left = 0;
|
||||
let right = this.cardinality - 1;
|
||||
while (left <= right) {
|
||||
const mid = Math.floor((left + right) / 2);
|
||||
const i = mid * 2;
|
||||
const x = this.array[i] | (this.array[i + 1] << 8);
|
||||
if (x < value) {
|
||||
left = mid + 1;
|
||||
} else if (x > value) {
|
||||
right = mid - 1;
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
class RoaringBitmapBits {
|
||||
/**
|
||||
* @param {Uint8Array} array
|
||||
*/
|
||||
constructor(array) {
|
||||
this.array = array;
|
||||
}
|
||||
/** @param {number} value */
|
||||
contains(value) {
|
||||
return !!(this.array[value >> 3] & (1 << (value & 7)));
|
||||
}
|
||||
}
|
||||
/** @type {Array<string>} */
|
||||
const EMPTY_STRING_ARRAY = [];
|
||||
|
||||
/** @type {Array<rustdoc.FunctionType>} */
|
||||
const EMPTY_GENERICS_ARRAY = [];
|
||||
|
||||
/** @type {Array<[number, rustdoc.FunctionType[]]>} */
|
||||
const EMPTY_BINDINGS_ARRAY = [];
|
||||
|
||||
/** @type {Map<number, Array<any>>} */
|
||||
const EMPTY_BINDINGS_MAP = new Map();
|
||||
|
||||
/**
|
||||
* A prefix tree, used for name-based search.
|
||||
*
|
||||
* This data structure is used to drive prefix matches,
|
||||
* such as matching the query "link" to `LinkedList`,
|
||||
* and Lev-distance matches, such as matching the
|
||||
* query "hahsmap" to `HashMap`. Substring matches,
|
||||
* such as "list" to `LinkedList`, are done with a
|
||||
* tailTable that deep-links into this trie.
|
||||
*
|
||||
* children
|
||||
* : A [sparse array] of subtrees. The array index
|
||||
* is a charCode.
|
||||
*
|
||||
* [sparse array]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/
|
||||
* Indexed_collections#sparse_arrays
|
||||
*
|
||||
* matches
|
||||
* : A list of search index IDs for this node.
|
||||
*
|
||||
* @type {{
|
||||
* children: NameTrie[],
|
||||
* matches: number[],
|
||||
* }}
|
||||
* @param {string|null} typename
|
||||
* @returns {number}
|
||||
*/
|
||||
class NameTrie {
|
||||
constructor() {
|
||||
this.children = [];
|
||||
this.matches = [];
|
||||
function itemTypeFromName(typename) {
|
||||
if (typename === null) {
|
||||
return NO_TYPE_FILTER;
|
||||
}
|
||||
/**
|
||||
* @param {string} name
|
||||
* @param {number} id
|
||||
* @param {Map<string, NameTrie[]>} tailTable
|
||||
*/
|
||||
insert(name, id, tailTable) {
|
||||
this.insertSubstring(name, 0, id, tailTable);
|
||||
}
|
||||
/**
|
||||
* @param {string} name
|
||||
* @param {number} substart
|
||||
* @param {number} id
|
||||
* @param {Map<string, NameTrie[]>} tailTable
|
||||
*/
|
||||
insertSubstring(name, substart, id, tailTable) {
|
||||
const l = name.length;
|
||||
if (substart === l) {
|
||||
this.matches.push(id);
|
||||
} else {
|
||||
const sb = name.charCodeAt(substart);
|
||||
let child;
|
||||
if (this.children[sb] !== undefined) {
|
||||
child = this.children[sb];
|
||||
} else {
|
||||
child = new NameTrie();
|
||||
this.children[sb] = child;
|
||||
/** @type {NameTrie[]} */
|
||||
let sste;
|
||||
if (substart >= 2) {
|
||||
const tail = name.substring(substart - 2, substart + 1);
|
||||
const entry = tailTable.get(tail);
|
||||
if (entry !== undefined) {
|
||||
sste = entry;
|
||||
} else {
|
||||
sste = [];
|
||||
tailTable.set(tail, sste);
|
||||
}
|
||||
sste.push(child);
|
||||
}
|
||||
}
|
||||
child.insertSubstring(name, substart + 1, id, tailTable);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @param {string} name
|
||||
* @param {Map<string, NameTrie[]>} tailTable
|
||||
*/
|
||||
search(name, tailTable) {
|
||||
const results = new Set();
|
||||
this.searchSubstringPrefix(name, 0, results);
|
||||
if (results.size < MAX_RESULTS && name.length >= 3) {
|
||||
const levParams = name.length >= 6 ?
|
||||
new Lev2TParametricDescription(name.length) :
|
||||
new Lev1TParametricDescription(name.length);
|
||||
this.searchLev(name, 0, levParams, results);
|
||||
const tail = name.substring(0, 3);
|
||||
const list = tailTable.get(tail);
|
||||
if (list !== undefined) {
|
||||
for (const entry of list) {
|
||||
entry.searchSubstringPrefix(name, 3, results);
|
||||
}
|
||||
}
|
||||
}
|
||||
return [...results];
|
||||
}
|
||||
/**
 * Collect into `results` the IDs stored at or below the trie node
 * reached by walking `name[substart..]` from this node.
 *
 * @param {string} name
 * @param {number} substart
 * @param {Set<number>} results
 */
searchSubstringPrefix(name, substart, results) {
    const l = name.length;
    if (substart === l) {
        // Query fully consumed: this node is an exact match, and every
        // descendant is a prefix match.
        for (const match of this.matches) {
            results.add(match);
        }
        // breadth-first traversal orders prefix matches by length
        /** @type {NameTrie[]} */
        let unprocessedChildren = [];
        for (const child of this.children) {
            if (child) {
                unprocessedChildren.push(child);
            }
        }
        /** @type {NameTrie[]} */
        let nextSet = [];
        while (unprocessedChildren.length !== 0) {
            /** @type {NameTrie} */
            // @ts-expect-error
            const next = unprocessedChildren.pop();
            // Queue this node's children for the next BFS level.
            for (const child of next.children) {
                if (child) {
                    nextSet.push(child);
                }
            }
            for (const match of next.matches) {
                results.add(match);
            }
            if (unprocessedChildren.length === 0) {
                // Current level drained: swap in the next level's
                // worklist, reusing the emptied array as the new
                // accumulator to avoid an allocation.
                const tmp = unprocessedChildren;
                unprocessedChildren = nextSet;
                nextSet = tmp;
            }
        }
    } else {
        // Still consuming the query: descend along the child edge
        // labelled with the next character code, if present.
        const sb = name.charCodeAt(substart);
        if (this.children[sb] !== undefined) {
            this.children[sb].searchSubstringPrefix(name, substart + 1, results);
        }
    }
}
|
||||
/**
 * Collect into `results` the IDs of names within the edit distance
 * encoded by `levParams`, a precomputed parametric Levenshtein automaton
 * (1 or 2 edits with transpositions).
 *
 * @param {string} name
 * @param {number} substart
 * @param {Lev2TParametricDescription|Lev1TParametricDescription} levParams
 * @param {Set<number>} results
 */
searchLev(name, substart, levParams, results) {
    // Explicit DFS stack of [trie node, automaton state] pairs.
    const stack = [[this, 0]];
    const n = levParams.n;
    while (stack.length !== 0) {
        // It's not empty
        //@ts-expect-error
        const [trie, levState] = stack.pop();
        for (const [charCode, child] of trie.children.entries()) {
            if (!child) {
                continue;
            }
            const levPos = levParams.getPosition(levState);
            // Characteristic vector of `charCode` over the window of
            // `name` the automaton can currently see (up to 2n+1 chars).
            const vector = levParams.getVector(
                name,
                charCode,
                levPos,
                Math.min(name.length, levPos + (2 * n) + 1),
            );
            const newLevState = levParams.transition(
                levState,
                levPos,
                vector,
            );
            // A negative state means the edit budget is exhausted:
            // prune this entire subtree.
            if (newLevState >= 0) {
                stack.push([child, newLevState]);
                if (levParams.isAccept(newLevState)) {
                    for (const match of child.matches) {
                        results.add(match);
                    }
                }
            }
        }
    }
|
||||
const index = itemTypes.findIndex(i => i === typename);
|
||||
if (index < 0) {
|
||||
throw ["Unknown type filter ", typename];
|
||||
}
|
||||
return index;
|
||||
}
|
||||
|
||||
class DocSearch {
|
||||
/**
|
||||
* @param {Map<string, rustdoc.RawSearchIndexCrate>} rawSearchIndex
|
||||
* @param {string} rootPath
|
||||
* @param {rustdoc.SearchState} searchState
|
||||
* @param {stringdex.Database} database
|
||||
*/
|
||||
constructor(rawSearchIndex, rootPath, searchState) {
|
||||
/**
|
||||
* @type {Map<String, RoaringBitmap>}
|
||||
*/
|
||||
this.searchIndexDeprecated = new Map();
|
||||
/**
|
||||
* @type {Map<String, RoaringBitmap>}
|
||||
*/
|
||||
this.searchIndexEmptyDesc = new Map();
|
||||
/**
|
||||
* @type {Uint32Array}
|
||||
*/
|
||||
this.functionTypeFingerprint = new Uint32Array(0);
|
||||
/**
|
||||
* Map from normalized type names to integers. Used to make type search
|
||||
* more efficient.
|
||||
*
|
||||
* @type {Map<string, {id: number, assocOnly: boolean}>}
|
||||
*/
|
||||
this.typeNameIdMap = new Map();
|
||||
/**
|
||||
* Map from type ID to associated type name. Used for display,
|
||||
* not for search.
|
||||
*
|
||||
* @type {Map<number, string>}
|
||||
*/
|
||||
this.assocTypeIdNameMap = new Map();
|
||||
this.ALIASES = new Map();
|
||||
this.FOUND_ALIASES = new Set();
|
||||
constructor(rootPath, database) {
|
||||
this.rootPath = rootPath;
|
||||
this.searchState = searchState;
|
||||
this.database = database;
|
||||
|
||||
/**
|
||||
* Special type name IDs for searching by array.
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfArray = this.buildTypeMapIndex("array");
|
||||
/**
|
||||
* Special type name IDs for searching by slice.
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfSlice = this.buildTypeMapIndex("slice");
|
||||
/**
|
||||
* Special type name IDs for searching by both array and slice (`[]` syntax).
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfArrayOrSlice = this.buildTypeMapIndex("[]");
|
||||
/**
|
||||
* Special type name IDs for searching by tuple.
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfTuple = this.buildTypeMapIndex("tuple");
|
||||
/**
|
||||
* Special type name IDs for searching by unit.
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfUnit = this.buildTypeMapIndex("unit");
|
||||
/**
|
||||
* Special type name IDs for searching by both tuple and unit (`()` syntax).
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfTupleOrUnit = this.buildTypeMapIndex("()");
|
||||
/**
|
||||
* Special type name IDs for searching `fn`.
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfFn = this.buildTypeMapIndex("fn");
|
||||
/**
|
||||
* Special type name IDs for searching `fnmut`.
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfFnMut = this.buildTypeMapIndex("fnmut");
|
||||
/**
|
||||
* Special type name IDs for searching `fnonce`.
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfFnOnce = this.buildTypeMapIndex("fnonce");
|
||||
/**
|
||||
* Special type name IDs for searching higher order functions (`->` syntax).
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfHof = this.buildTypeMapIndex("->");
|
||||
/**
|
||||
* Special type name IDs the output assoc type.
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfOutput = this.buildTypeMapIndex("output", true);
|
||||
/**
|
||||
* Special type name IDs for searching by reference.
|
||||
* @type {number}
|
||||
*/
|
||||
this.typeNameIdOfReference = this.buildTypeMapIndex("reference");
|
||||
this.typeNameIdOfOutput = -1;
|
||||
this.typeNameIdOfArray = -1;
|
||||
this.typeNameIdOfSlice = -1;
|
||||
this.typeNameIdOfArrayOrSlice = -1;
|
||||
this.typeNameIdOfTuple = -1;
|
||||
this.typeNameIdOfUnit = -1;
|
||||
this.typeNameIdOfTupleOrUnit = -1;
|
||||
this.typeNameIdOfReference = -1;
|
||||
this.typeNameIdOfHof = -1;
|
||||
|
||||
/**
|
||||
* Empty, immutable map used in item search types with no bindings.
|
||||
*
|
||||
* @type {Map<number, Array<any>>}
|
||||
*/
|
||||
this.EMPTY_BINDINGS_MAP = new Map();
|
||||
this.utf8decoder = new TextDecoder();
|
||||
|
||||
/**
|
||||
* Empty, immutable map used in item search types with no bindings.
|
||||
*
|
||||
* @type {Array<any>}
|
||||
*/
|
||||
this.EMPTY_GENERICS_ARRAY = [];
|
||||
|
||||
/**
|
||||
* Object pool for function types with no bindings or generics.
|
||||
* This is reset after loading the index.
|
||||
*
|
||||
* @type {Map<number|null, rustdoc.FunctionType>}
|
||||
*/
|
||||
/** @type {Map<number|null, rustdoc.FunctionType>} */
|
||||
this.TYPES_POOL = new Map();
|
||||
|
||||
/**
|
||||
* A trie for finding items by name.
|
||||
* This is used for edit distance and prefix finding.
|
||||
*
|
||||
* @type {NameTrie}
|
||||
*/
|
||||
this.nameTrie = new NameTrie();
|
||||
|
||||
/**
|
||||
* Find items by 3-substring. This is a map from three-char
|
||||
* prefixes into lists of subtries.
|
||||
*/
|
||||
this.tailTable = new Map();
|
||||
|
||||
/**
|
||||
* @type {Array<rustdoc.Row>}
|
||||
*/
|
||||
this.searchIndex = this.buildIndex(rawSearchIndex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an item to the type Name->ID map, or, if one already exists, use it.
|
||||
* Returns the number. If name is "" or null, return null (pure generic).
|
||||
*
|
||||
* This is effectively string interning, so that function matching can be
|
||||
* done more quickly. Two types with the same name but different item kinds
|
||||
* get the same ID.
|
||||
*
|
||||
* @template T extends string
|
||||
* @overload
|
||||
* @param {T} name
|
||||
* @param {boolean=} isAssocType - True if this is an assoc type
|
||||
* @returns {T extends "" ? null : number}
|
||||
*
|
||||
* @param {string} name
|
||||
* @param {boolean=} isAssocType
|
||||
* @returns {number | null}
|
||||
*
|
||||
* Load search index. If you do not call this function, `execQuery`
|
||||
* will never fulfill.
|
||||
*/
|
||||
buildTypeMapIndex(name, isAssocType) {
|
||||
if (name === "" || name === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const obj = this.typeNameIdMap.get(name);
|
||||
if (obj !== undefined) {
|
||||
obj.assocOnly = !!(isAssocType && obj.assocOnly);
|
||||
return obj.id;
|
||||
} else {
|
||||
const id = this.typeNameIdMap.size;
|
||||
this.typeNameIdMap.set(name, { id, assocOnly: !!isAssocType });
|
||||
return id;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a list of RawFunctionType / ID to object-based FunctionType.
|
||||
*
|
||||
* Crates often have lots of functions in them, and it's common to have a large number of
|
||||
* functions that operate on a small set of data types, so the search index compresses them
|
||||
* by encoding function parameter and return types as indexes into an array of names.
|
||||
*
|
||||
* Even when a general-purpose compression algorithm is used, this is still a win.
|
||||
* I checked. https://github.com/rust-lang/rust/pull/98475#issue-1284395985
|
||||
*
|
||||
* The format for individual function types is encoded in
|
||||
* librustdoc/html/render/mod.rs: impl Serialize for RenderType
|
||||
*
|
||||
* @param {null|Array<rustdoc.RawFunctionType>} types
|
||||
* @param {Array<{
|
||||
* name: string,
|
||||
* ty: number,
|
||||
* path: string|null,
|
||||
* exactPath: string|null,
|
||||
* unboxFlag: boolean
|
||||
* }>} paths
|
||||
* @param {Array<{
|
||||
* name: string,
|
||||
* ty: number,
|
||||
* path: string|null,
|
||||
* exactPath: string|null,
|
||||
* unboxFlag: boolean,
|
||||
* }>} lowercasePaths
|
||||
*
|
||||
* @return {Array<rustdoc.FunctionType>}
|
||||
*/
|
||||
buildItemSearchTypeAll(types, paths, lowercasePaths) {
|
||||
return types && types.length > 0 ?
|
||||
types.map(type => this.buildItemSearchType(type, paths, lowercasePaths)) :
|
||||
this.EMPTY_GENERICS_ARRAY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a single type.
|
||||
*
|
||||
* @param {rustdoc.RawFunctionType} type
|
||||
* @param {Array<{
|
||||
* name: string,
|
||||
* ty: number,
|
||||
* path: string|null,
|
||||
* exactPath: string|null,
|
||||
* unboxFlag: boolean
|
||||
* }>} paths
|
||||
* @param {Array<{
|
||||
* name: string,
|
||||
* ty: number,
|
||||
* path: string|null,
|
||||
* exactPath: string|null,
|
||||
* unboxFlag: boolean,
|
||||
* }>} lowercasePaths
|
||||
* @param {boolean=} isAssocType
|
||||
*/
|
||||
buildItemSearchType(type, paths, lowercasePaths, isAssocType) {
|
||||
const PATH_INDEX_DATA = 0;
|
||||
const GENERICS_DATA = 1;
|
||||
const BINDINGS_DATA = 2;
|
||||
let pathIndex, generics, bindings;
|
||||
if (typeof type === "number") {
|
||||
pathIndex = type;
|
||||
generics = this.EMPTY_GENERICS_ARRAY;
|
||||
bindings = this.EMPTY_BINDINGS_MAP;
|
||||
} else {
|
||||
pathIndex = type[PATH_INDEX_DATA];
|
||||
generics = this.buildItemSearchTypeAll(
|
||||
type[GENERICS_DATA],
|
||||
paths,
|
||||
lowercasePaths,
|
||||
);
|
||||
// @ts-expect-error
|
||||
if (type.length > BINDINGS_DATA && type[BINDINGS_DATA].length > 0) {
|
||||
// @ts-expect-error
|
||||
bindings = new Map(type[BINDINGS_DATA].map(binding => {
|
||||
const [assocType, constraints] = binding;
|
||||
// Associated type constructors are represented sloppily in rustdoc's
|
||||
// type search, to make the engine simpler.
|
||||
//
|
||||
// MyType<Output<T>=Result<T>> is equivalent to MyType<Output<Result<T>>=T>
|
||||
// and both are, essentially
|
||||
// MyType<Output=(T, Result<T>)>, except the tuple isn't actually there.
|
||||
// It's more like the value of a type binding is naturally an array,
|
||||
// which rustdoc calls "constraints".
|
||||
//
|
||||
// As a result, the key should never have generics on it.
|
||||
return [
|
||||
this.buildItemSearchType(assocType, paths, lowercasePaths, true).id,
|
||||
this.buildItemSearchTypeAll(constraints, paths, lowercasePaths),
|
||||
];
|
||||
}));
|
||||
} else {
|
||||
bindings = this.EMPTY_BINDINGS_MAP;
|
||||
}
|
||||
async buildIndex() {
|
||||
const nn = this.database.getIndex("normalizedName");
|
||||
if (!nn) {
|
||||
return;
|
||||
}
|
||||
// Each of these identifiers are used specially by
|
||||
// type-driven search.
|
||||
const [
|
||||
// output is the special associated type that goes
|
||||
// after the arrow: the type checker desugars
|
||||
// the path `Fn(a) -> b` into `Fn<Output=b, (a)>`
|
||||
output,
|
||||
// fn, fnmut, and fnonce all match `->`
|
||||
fn,
|
||||
fnMut,
|
||||
fnOnce,
|
||||
hof,
|
||||
// array and slice both match `[]`
|
||||
array,
|
||||
slice,
|
||||
arrayOrSlice,
|
||||
// tuple and unit both match `()`
|
||||
tuple,
|
||||
unit,
|
||||
tupleOrUnit,
|
||||
// reference matches `&`
|
||||
reference,
|
||||
// never matches `!`
|
||||
never,
|
||||
] = await Promise.all([
|
||||
nn.search("output"),
|
||||
nn.search("fn"),
|
||||
nn.search("fnmut"),
|
||||
nn.search("fnonce"),
|
||||
nn.search("->"),
|
||||
nn.search("array"),
|
||||
nn.search("slice"),
|
||||
nn.search("[]"),
|
||||
nn.search("tuple"),
|
||||
nn.search("unit"),
|
||||
nn.search("()"),
|
||||
nn.search("reference"),
|
||||
nn.search("never"),
|
||||
]);
|
||||
/**
|
||||
* @type {rustdoc.FunctionType}
|
||||
*/
|
||||
let result;
|
||||
if (pathIndex < 0) {
|
||||
// types less than 0 are generic parameters
|
||||
// the actual names of generic parameters aren't stored, since they aren't API
|
||||
result = {
|
||||
id: pathIndex,
|
||||
name: "",
|
||||
ty: TY_GENERIC,
|
||||
path: null,
|
||||
exactPath: null,
|
||||
generics,
|
||||
bindings,
|
||||
unboxFlag: true,
|
||||
};
|
||||
} else if (pathIndex === 0) {
|
||||
// `0` is used as a sentinel because it's fewer bytes than `null`
|
||||
result = {
|
||||
id: null,
|
||||
name: "",
|
||||
ty: null,
|
||||
path: null,
|
||||
exactPath: null,
|
||||
generics,
|
||||
bindings,
|
||||
unboxFlag: true,
|
||||
};
|
||||
} else {
|
||||
const item = lowercasePaths[pathIndex - 1];
|
||||
const id = this.buildTypeMapIndex(item.name, isAssocType);
|
||||
if (isAssocType && id !== null) {
|
||||
this.assocTypeIdNameMap.set(id, paths[pathIndex - 1].name);
|
||||
}
|
||||
result = {
|
||||
id,
|
||||
name: paths[pathIndex - 1].name,
|
||||
ty: item.ty,
|
||||
path: item.path,
|
||||
exactPath: item.exactPath,
|
||||
generics,
|
||||
bindings,
|
||||
unboxFlag: item.unboxFlag,
|
||||
};
|
||||
}
|
||||
const cr = this.TYPES_POOL.get(result.id);
|
||||
if (cr) {
|
||||
// Shallow equality check. Since this function is used
|
||||
// to construct every type object, this should be mostly
|
||||
// equivalent to a deep equality check, except if there's
|
||||
// a conflict, we don't keep the old one around, so it's
|
||||
// not a fully precise implementation of hashcons.
|
||||
if (cr.generics.length === result.generics.length &&
|
||||
cr.generics !== result.generics &&
|
||||
cr.generics.every((x, i) => result.generics[i] === x)
|
||||
) {
|
||||
result.generics = cr.generics;
|
||||
}
|
||||
if (cr.bindings.size === result.bindings.size && cr.bindings !== result.bindings) {
|
||||
let ok = true;
|
||||
for (const [k, v] of cr.bindings.entries()) {
|
||||
// @ts-expect-error
|
||||
const v2 = result.bindings.get(v);
|
||||
if (!v2) {
|
||||
ok = false;
|
||||
break;
|
||||
}
|
||||
if (v !== v2 && v.length === v2.length && v.every((x, i) => v2[i] === x)) {
|
||||
result.bindings.set(k, v);
|
||||
} else if (v !== v2) {
|
||||
ok = false;
|
||||
break;
|
||||
* @param {stringdex.Trie|null|undefined} trie
|
||||
* @param {rustdoc.ItemType} ty
|
||||
* @param {string} modulePath
|
||||
* @returns {Promise<number>}
|
||||
* */
|
||||
const first = async(trie, ty, modulePath) => {
|
||||
if (trie) {
|
||||
for (const id of trie.matches().entries()) {
|
||||
const pathData = await this.getPathData(id);
|
||||
if (pathData && pathData.ty === ty && pathData.modulePath === modulePath) {
|
||||
return id;
|
||||
}
|
||||
}
|
||||
if (ok) {
|
||||
result.bindings = cr.bindings;
|
||||
}
|
||||
}
|
||||
if (cr.ty === result.ty && cr.path === result.path
|
||||
&& cr.bindings === result.bindings && cr.generics === result.generics
|
||||
&& cr.ty === result.ty && cr.name === result.name
|
||||
&& cr.unboxFlag === result.unboxFlag
|
||||
) {
|
||||
return cr;
|
||||
}
|
||||
}
|
||||
this.TYPES_POOL.set(result.id, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Type fingerprints allow fast, approximate matching of types.
|
||||
*
|
||||
* This algo creates a compact representation of the type set using a Bloom filter.
|
||||
* This fingerprint is used three ways:
|
||||
*
|
||||
* - It accelerates the matching algorithm by checking the function fingerprint against the
|
||||
* query fingerprint. If any bits are set in the query but not in the function, it can't
|
||||
* match.
|
||||
*
|
||||
* - The fourth section has the number of items in the set.
|
||||
* This is the distance function, used for filtering and for sorting.
|
||||
*
|
||||
* [^1]: Distance is the relatively naive metric of counting the number of distinct items in
|
||||
* the function that are not present in the query.
|
||||
*
|
||||
* @param {rustdoc.FingerprintableType} type - a single type
|
||||
* @param {Uint32Array} output - write the fingerprint to this data structure: uses 128 bits
|
||||
*/
|
||||
buildFunctionTypeFingerprint(type, output) {
|
||||
let input = type.id;
|
||||
// All forms of `[]`/`()`/`->` get collapsed down to one thing in the bloom filter.
|
||||
// Differentiating between arrays and slices, if the user asks for it, is
|
||||
// still done in the matching algorithm.
|
||||
if (input === this.typeNameIdOfArray || input === this.typeNameIdOfSlice) {
|
||||
input = this.typeNameIdOfArrayOrSlice;
|
||||
}
|
||||
if (input === this.typeNameIdOfTuple || input === this.typeNameIdOfUnit) {
|
||||
input = this.typeNameIdOfTupleOrUnit;
|
||||
}
|
||||
if (input === this.typeNameIdOfFn || input === this.typeNameIdOfFnMut ||
|
||||
input === this.typeNameIdOfFnOnce) {
|
||||
input = this.typeNameIdOfHof;
|
||||
}
|
||||
/**
|
||||
* http://burtleburtle.net/bob/hash/integer.html
|
||||
* ~~ is toInt32. It's used before adding, so
|
||||
* the number stays in safe integer range.
|
||||
* @param {number} k
|
||||
*/
|
||||
const hashint1 = k => {
|
||||
k = (~~k + 0x7ed55d16) + (k << 12);
|
||||
k = (k ^ 0xc761c23c) ^ (k >>> 19);
|
||||
k = (~~k + 0x165667b1) + (k << 5);
|
||||
k = (~~k + 0xd3a2646c) ^ (k << 9);
|
||||
k = (~~k + 0xfd7046c5) + (k << 3);
|
||||
return (k ^ 0xb55a4f09) ^ (k >>> 16);
|
||||
return -1;
|
||||
};
|
||||
/** @param {number} k */
|
||||
const hashint2 = k => {
|
||||
k = ~k + (k << 15);
|
||||
k ^= k >>> 12;
|
||||
k += k << 2;
|
||||
k ^= k >>> 4;
|
||||
k = Math.imul(k, 2057);
|
||||
return k ^ (k >> 16);
|
||||
};
|
||||
if (input !== null) {
|
||||
const h0a = hashint1(input);
|
||||
const h0b = hashint2(input);
|
||||
// Less Hashing, Same Performance: Building a Better Bloom Filter
|
||||
// doi=10.1.1.72.2442
|
||||
const h1a = ~~(h0a + Math.imul(h0b, 2));
|
||||
const h1b = ~~(h0a + Math.imul(h0b, 3));
|
||||
const h2a = ~~(h0a + Math.imul(h0b, 4));
|
||||
const h2b = ~~(h0a + Math.imul(h0b, 5));
|
||||
output[0] |= (1 << (h0a % 32)) | (1 << (h1b % 32));
|
||||
output[1] |= (1 << (h1a % 32)) | (1 << (h2b % 32));
|
||||
output[2] |= (1 << (h2a % 32)) | (1 << (h0b % 32));
|
||||
// output[3] is the total number of items in the type signature
|
||||
output[3] += 1;
|
||||
}
|
||||
for (const g of type.generics) {
|
||||
this.buildFunctionTypeFingerprint(g, output);
|
||||
}
|
||||
/**
|
||||
* @type {{
|
||||
* id: number|null,
|
||||
* ty: number,
|
||||
* generics: rustdoc.FingerprintableType[],
|
||||
* bindings: Map<number, rustdoc.FingerprintableType[]>
|
||||
* }}
|
||||
*/
|
||||
const fb = {
|
||||
id: null,
|
||||
ty: 0,
|
||||
generics: this.EMPTY_GENERICS_ARRAY,
|
||||
bindings: this.EMPTY_BINDINGS_MAP,
|
||||
};
|
||||
for (const [k, v] of type.bindings.entries()) {
|
||||
fb.id = k;
|
||||
fb.generics = v;
|
||||
this.buildFunctionTypeFingerprint(fb, output);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert raw search index into in-memory search index.
|
||||
*
|
||||
* @param {Map<string, rustdoc.RawSearchIndexCrate>} rawSearchIndex
|
||||
* @returns {rustdoc.Row[]}
|
||||
*/
|
||||
buildIndex(rawSearchIndex) {
|
||||
/**
|
||||
* Convert from RawFunctionSearchType to FunctionSearchType.
|
||||
*
|
||||
* Crates often have lots of functions in them, and function signatures are sometimes
|
||||
* complex, so rustdoc uses a pretty tight encoding for them. This function converts it
|
||||
* to a simpler, object-based encoding so that the actual search code is more readable
|
||||
* and easier to debug.
|
||||
*
|
||||
* The raw function search type format is generated using serde in
|
||||
* librustdoc/html/render/mod.rs: IndexItemFunctionType::write_to_string
|
||||
*
|
||||
* @param {Array<{
|
||||
* name: string,
|
||||
* ty: number,
|
||||
* path: string|null,
|
||||
* exactPath: string|null,
|
||||
* unboxFlag: boolean
|
||||
* }>} paths
|
||||
* @param {Array<{
|
||||
* name: string,
|
||||
* ty: number,
|
||||
* path: string|null,
|
||||
* exactPath: string|null,
|
||||
* unboxFlag: boolean
|
||||
* }>} lowercasePaths
|
||||
*
|
||||
* @return {function(rustdoc.RawFunctionSearchType): null|rustdoc.FunctionSearchType}
|
||||
*/
|
||||
const buildFunctionSearchTypeCallback = (paths, lowercasePaths) => {
|
||||
/**
|
||||
* @param {rustdoc.RawFunctionSearchType} functionSearchType
|
||||
*/
|
||||
const cb = functionSearchType => {
|
||||
if (functionSearchType === 0) {
|
||||
return null;
|
||||
}
|
||||
const INPUTS_DATA = 0;
|
||||
const OUTPUT_DATA = 1;
|
||||
/** @type {rustdoc.FunctionType[]} */
|
||||
let inputs;
|
||||
/** @type {rustdoc.FunctionType[]} */
|
||||
let output;
|
||||
if (typeof functionSearchType[INPUTS_DATA] === "number") {
|
||||
inputs = [
|
||||
this.buildItemSearchType(
|
||||
functionSearchType[INPUTS_DATA],
|
||||
paths,
|
||||
lowercasePaths,
|
||||
),
|
||||
];
|
||||
} else {
|
||||
inputs = this.buildItemSearchTypeAll(
|
||||
functionSearchType[INPUTS_DATA],
|
||||
paths,
|
||||
lowercasePaths,
|
||||
);
|
||||
}
|
||||
if (functionSearchType.length > 1) {
|
||||
if (typeof functionSearchType[OUTPUT_DATA] === "number") {
|
||||
output = [
|
||||
this.buildItemSearchType(
|
||||
functionSearchType[OUTPUT_DATA],
|
||||
paths,
|
||||
lowercasePaths,
|
||||
),
|
||||
];
|
||||
} else {
|
||||
output = this.buildItemSearchTypeAll(
|
||||
// @ts-expect-error
|
||||
functionSearchType[OUTPUT_DATA],
|
||||
paths,
|
||||
lowercasePaths,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
output = [];
|
||||
}
|
||||
const where_clause = [];
|
||||
const l = functionSearchType.length;
|
||||
for (let i = 2; i < l; ++i) {
|
||||
where_clause.push(typeof functionSearchType[i] === "number"
|
||||
// @ts-expect-error
|
||||
? [this.buildItemSearchType(functionSearchType[i], paths, lowercasePaths)]
|
||||
: this.buildItemSearchTypeAll(
|
||||
// @ts-expect-error
|
||||
functionSearchType[i],
|
||||
paths,
|
||||
lowercasePaths,
|
||||
));
|
||||
}
|
||||
return {
|
||||
inputs, output, where_clause,
|
||||
};
|
||||
};
|
||||
return cb;
|
||||
};
|
||||
|
||||
/** @type {rustdoc.Row[]} */
|
||||
const searchIndex = [];
|
||||
let currentIndex = 0;
|
||||
let id = 0;
|
||||
|
||||
// Function type fingerprints are 128-bit bloom filters that are used to
|
||||
// estimate the distance between function and query.
|
||||
// This loop counts the number of items to allocate a fingerprint for.
|
||||
for (const crate of rawSearchIndex.values()) {
|
||||
// Each item gets an entry in the fingerprint array, and the crate
|
||||
// does, too
|
||||
id += crate.t.length + 1;
|
||||
}
|
||||
this.functionTypeFingerprint = new Uint32Array((id + 1) * 4);
|
||||
// This loop actually generates the search item indexes, including
|
||||
// normalized names, type signature objects and fingerprints, and aliases.
|
||||
id = 0;
|
||||
|
||||
/** @type {Array<[string, { [key: string]: Array<number> }, number]>} */
|
||||
const allAliases = [];
|
||||
for (const [crate, crateCorpus] of rawSearchIndex) {
|
||||
// a string representing the lengths of each description shard
|
||||
// a string representing the list of function types
|
||||
const itemDescShardDecoder = new VlqHexDecoder(crateCorpus.D, noop => {
|
||||
/** @type {number} */
|
||||
// @ts-expect-error
|
||||
const n = noop;
|
||||
return n;
|
||||
});
|
||||
let descShard = {
|
||||
crate,
|
||||
shard: 0,
|
||||
start: 0,
|
||||
len: itemDescShardDecoder.next(),
|
||||
promise: null,
|
||||
resolve: null,
|
||||
};
|
||||
const descShardList = [descShard];
|
||||
|
||||
// Deprecated items and items with no description
|
||||
this.searchIndexDeprecated.set(crate, new RoaringBitmap(crateCorpus.c));
|
||||
this.searchIndexEmptyDesc.set(crate, new RoaringBitmap(crateCorpus.e));
|
||||
let descIndex = 0;
|
||||
|
||||
/**
|
||||
* List of generic function type parameter names.
|
||||
* Used for display, not for searching.
|
||||
* @type {string[]}
|
||||
*/
|
||||
let lastParamNames = [];
|
||||
|
||||
// This object should have exactly the same set of fields as the "row"
|
||||
// object defined below. Your JavaScript runtime will thank you.
|
||||
// https://mathiasbynens.be/notes/shapes-ics
|
||||
let normalizedName = crate.indexOf("_") === -1 ? crate : crate.replace(/_/g, "");
|
||||
const crateRow = {
|
||||
crate,
|
||||
ty: 3, // == ExternCrate
|
||||
name: crate,
|
||||
path: "",
|
||||
descShard,
|
||||
descIndex,
|
||||
exactPath: "",
|
||||
desc: crateCorpus.doc,
|
||||
parent: undefined,
|
||||
type: null,
|
||||
paramNames: lastParamNames,
|
||||
id,
|
||||
word: crate,
|
||||
normalizedName,
|
||||
bitIndex: 0,
|
||||
implDisambiguator: null,
|
||||
};
|
||||
this.nameTrie.insert(normalizedName, id, this.tailTable);
|
||||
id += 1;
|
||||
searchIndex.push(crateRow);
|
||||
currentIndex += 1;
|
||||
// it's not undefined
|
||||
// @ts-expect-error
|
||||
if (!this.searchIndexEmptyDesc.get(crate).contains(0)) {
|
||||
descIndex += 1;
|
||||
}
|
||||
|
||||
// see `RawSearchIndexCrate` in `rustdoc.d.ts` for a more
|
||||
// up to date description of these fields
|
||||
const itemTypes = crateCorpus.t;
|
||||
// an array of (String) item names
|
||||
const itemNames = crateCorpus.n;
|
||||
// an array of [(Number) item index,
|
||||
// (String) full path]
|
||||
// an item whose index is not present will fall back to the previous present path
|
||||
// i.e. if indices 4 and 11 are present, but 5-10 and 12-13 are not present,
|
||||
// 5-10 will fall back to the path for 4 and 12-13 will fall back to the path for 11
|
||||
const itemPaths = new Map(crateCorpus.q);
|
||||
// An array of [(Number) item index, (Number) path index]
|
||||
// Used to de-duplicate inlined and re-exported stuff
|
||||
const itemReexports = new Map(crateCorpus.r);
|
||||
// an array of (Number) the parent path index + 1 to `paths`, or 0 if none
|
||||
const itemParentIdxDecoder = new VlqHexDecoder(crateCorpus.i, noop => noop);
|
||||
// a map Number, string for impl disambiguators
|
||||
const implDisambiguator = new Map(crateCorpus.b);
|
||||
const rawPaths = crateCorpus.p;
|
||||
const aliases = crateCorpus.a;
|
||||
// an array of [(Number) item index,
|
||||
// (String) comma-separated list of function generic param names]
|
||||
// an item whose index is not present will fall back to the previous present path
|
||||
const itemParamNames = new Map(crateCorpus.P);
|
||||
|
||||
/**
|
||||
* @type {Array<{
|
||||
* name: string,
|
||||
* ty: number,
|
||||
* path: string|null,
|
||||
* exactPath: string|null,
|
||||
* unboxFlag: boolean
|
||||
* }>}
|
||||
*/
|
||||
const lowercasePaths = [];
|
||||
/**
|
||||
* @type {Array<{
|
||||
* name: string,
|
||||
* ty: number,
|
||||
* path: string|null,
|
||||
* exactPath: string|null,
|
||||
* unboxFlag: boolean
|
||||
* }>}
|
||||
*/
|
||||
const paths = [];
|
||||
|
||||
// a string representing the list of function types
|
||||
const itemFunctionDecoder = new VlqHexDecoder(
|
||||
crateCorpus.f,
|
||||
// @ts-expect-error
|
||||
buildFunctionSearchTypeCallback(paths, lowercasePaths),
|
||||
);
|
||||
|
||||
// convert `rawPaths` entries into object form
|
||||
// generate normalizedPaths for function search mode
|
||||
let len = rawPaths.length;
|
||||
let lastPath = undef2null(itemPaths.get(0));
|
||||
for (let i = 0; i < len; ++i) {
|
||||
const elem = rawPaths[i];
|
||||
const ty = elem[0];
|
||||
const name = elem[1];
|
||||
/**
|
||||
* @param {2|3} idx
|
||||
* @param {string|null} if_null
|
||||
* @param {string|null} if_not_found
|
||||
* @returns {string|null}
|
||||
*/
|
||||
const elemPath = (idx, if_null, if_not_found) => {
|
||||
if (elem.length > idx && elem[idx] !== undefined) {
|
||||
const p = itemPaths.get(elem[idx]);
|
||||
if (p !== undefined) {
|
||||
return p;
|
||||
}
|
||||
return if_not_found;
|
||||
}
|
||||
return if_null;
|
||||
};
|
||||
const path = elemPath(2, lastPath, null);
|
||||
const exactPath = elemPath(3, path, path);
|
||||
const unboxFlag = elem.length > 4 && !!elem[4];
|
||||
|
||||
lowercasePaths.push({ ty, name: name.toLowerCase(), path, exactPath, unboxFlag });
|
||||
paths[i] = { ty, name, path, exactPath, unboxFlag };
|
||||
}
|
||||
|
||||
// Convert `item*` into an object form, and construct word indices.
|
||||
//
|
||||
// Before any analysis is performed, let's gather the search terms to
|
||||
// search against apart from the rest of the data. This is a quick
|
||||
// operation that is cached for the life of the page state so that
|
||||
// all other search operations have access to this cached data for
|
||||
// faster analysis operations
|
||||
lastPath = "";
|
||||
len = itemTypes.length;
|
||||
let lastName = "";
|
||||
let lastWord = "";
|
||||
for (let i = 0; i < len; ++i) {
|
||||
const bitIndex = i + 1;
|
||||
if (descIndex >= descShard.len &&
|
||||
// @ts-expect-error
|
||||
!this.searchIndexEmptyDesc.get(crate).contains(bitIndex)) {
|
||||
descShard = {
|
||||
crate,
|
||||
shard: descShard.shard + 1,
|
||||
start: descShard.start + descShard.len,
|
||||
len: itemDescShardDecoder.next(),
|
||||
promise: null,
|
||||
resolve: null,
|
||||
};
|
||||
descIndex = 0;
|
||||
descShardList.push(descShard);
|
||||
}
|
||||
const name = itemNames[i] === "" ? lastName : itemNames[i];
|
||||
const word = itemNames[i] === "" ? lastWord : itemNames[i].toLowerCase();
|
||||
const pathU = itemPaths.get(i);
|
||||
const path = pathU !== undefined ? pathU : lastPath;
|
||||
const paramNameString = itemParamNames.get(i);
|
||||
const paramNames = paramNameString !== undefined ?
|
||||
paramNameString.split(",") :
|
||||
lastParamNames;
|
||||
const type = itemFunctionDecoder.next();
|
||||
if (type !== null) {
|
||||
if (type) {
|
||||
const fp = this.functionTypeFingerprint.subarray(id * 4, (id + 1) * 4);
|
||||
for (const t of type.inputs) {
|
||||
this.buildFunctionTypeFingerprint(t, fp);
|
||||
}
|
||||
for (const t of type.output) {
|
||||
this.buildFunctionTypeFingerprint(t, fp);
|
||||
}
|
||||
for (const w of type.where_clause) {
|
||||
for (const t of w) {
|
||||
this.buildFunctionTypeFingerprint(t, fp);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// This object should have exactly the same set of fields as the "crateRow"
|
||||
// object defined above.
|
||||
const itemParentIdx = itemParentIdxDecoder.next();
|
||||
normalizedName = word.indexOf("_") === -1 ? word : word.replace(/_/g, "");
|
||||
/** @type {rustdoc.Row} */
|
||||
const row = {
|
||||
crate,
|
||||
ty: itemTypes.charCodeAt(i) - 65, // 65 = "A"
|
||||
name,
|
||||
path,
|
||||
descShard,
|
||||
descIndex,
|
||||
exactPath: itemReexports.has(i) ?
|
||||
// @ts-expect-error
|
||||
itemPaths.get(itemReexports.get(i)) : path,
|
||||
// @ts-expect-error
|
||||
parent: itemParentIdx > 0 ? paths[itemParentIdx - 1] : undefined,
|
||||
type,
|
||||
paramNames,
|
||||
id,
|
||||
word,
|
||||
normalizedName,
|
||||
bitIndex,
|
||||
implDisambiguator: undef2null(implDisambiguator.get(i)),
|
||||
};
|
||||
this.nameTrie.insert(normalizedName, id, this.tailTable);
|
||||
id += 1;
|
||||
searchIndex.push(row);
|
||||
lastPath = row.path;
|
||||
lastParamNames = row.paramNames;
|
||||
// @ts-expect-error
|
||||
if (!this.searchIndexEmptyDesc.get(crate).contains(bitIndex)) {
|
||||
descIndex += 1;
|
||||
}
|
||||
lastName = name;
|
||||
lastWord = word;
|
||||
}
|
||||
|
||||
if (aliases) {
|
||||
// We need to add the aliases in `searchIndex` after we finished filling it
|
||||
// to not mess up indexes.
|
||||
allAliases.push([crate, aliases, currentIndex]);
|
||||
}
|
||||
currentIndex += itemTypes.length;
|
||||
this.searchState.descShards.set(crate, descShardList);
|
||||
}
|
||||
|
||||
for (const [crate, aliases, index] of allAliases) {
|
||||
for (const [alias_name, alias_refs] of Object.entries(aliases)) {
|
||||
if (!this.ALIASES.has(crate)) {
|
||||
this.ALIASES.set(crate, new Map());
|
||||
}
|
||||
const word = alias_name.toLowerCase();
|
||||
const crate_alias_map = this.ALIASES.get(crate);
|
||||
if (!crate_alias_map.has(word)) {
|
||||
crate_alias_map.set(word, []);
|
||||
}
|
||||
const aliases_map = crate_alias_map.get(word);
|
||||
|
||||
const normalizedName = word.indexOf("_") === -1 ? word : word.replace(/_/g, "");
|
||||
for (const alias of alias_refs) {
|
||||
const originalIndex = alias + index;
|
||||
const original = searchIndex[originalIndex];
|
||||
/** @type {rustdoc.Row} */
|
||||
const row = {
|
||||
crate,
|
||||
name: alias_name,
|
||||
normalizedName,
|
||||
is_alias: true,
|
||||
ty: original.ty,
|
||||
type: original.type,
|
||||
paramNames: [],
|
||||
word,
|
||||
id,
|
||||
parent: undefined,
|
||||
original,
|
||||
path: "",
|
||||
implDisambiguator: original.implDisambiguator,
|
||||
// Needed to load the description of the original item.
|
||||
// @ts-ignore
|
||||
descShard: original.descShard,
|
||||
descIndex: original.descIndex,
|
||||
bitIndex: original.bitIndex,
|
||||
};
|
||||
aliases_map.push(row);
|
||||
this.nameTrie.insert(normalizedName, id, this.tailTable);
|
||||
id += 1;
|
||||
searchIndex.push(row);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Drop the (rather large) hash table used for reusing function items
|
||||
this.TYPES_POOL = new Map();
|
||||
return searchIndex;
|
||||
this.typeNameIdOfOutput = await first(output, TY_ASSOCTYPE, "");
|
||||
this.typeNameIdOfFnPtr = await first(fn, TY_PRIMITIVE, "");
|
||||
this.typeNameIdOfFn = await first(fn, TY_TRAIT, "core::ops");
|
||||
this.typeNameIdOfFnMut = await first(fnMut, TY_TRAIT, "core::ops");
|
||||
this.typeNameIdOfFnOnce = await first(fnOnce, TY_TRAIT, "core::ops");
|
||||
this.typeNameIdOfArray = await first(array, TY_PRIMITIVE, "");
|
||||
this.typeNameIdOfSlice = await first(slice, TY_PRIMITIVE, "");
|
||||
this.typeNameIdOfArrayOrSlice = await first(arrayOrSlice, TY_PRIMITIVE, "");
|
||||
this.typeNameIdOfTuple = await first(tuple, TY_PRIMITIVE, "");
|
||||
this.typeNameIdOfUnit = await first(unit, TY_PRIMITIVE, "");
|
||||
this.typeNameIdOfTupleOrUnit = await first(tupleOrUnit, TY_PRIMITIVE, "");
|
||||
this.typeNameIdOfReference = await first(reference, TY_PRIMITIVE, "");
|
||||
this.typeNameIdOfHof = await first(hof, TY_PRIMITIVE, "");
|
||||
this.typeNameIdOfNever = await first(never, TY_PRIMITIVE, "");
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -2342,41 +1287,6 @@ class DocSearch {
|
||||
* @return {rustdoc.ParsedQuery<rustdoc.ParserQueryElement>} - The parsed query
|
||||
*/
|
||||
static parseQuery(userQuery) {
|
||||
/**
|
||||
* @param {string} typename
|
||||
* @returns {number}
|
||||
*/
|
||||
function itemTypeFromName(typename) {
|
||||
const index = itemTypes.findIndex(i => i === typename);
|
||||
if (index < 0) {
|
||||
throw ["Unknown type filter ", typename];
|
||||
}
|
||||
return index;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {rustdoc.ParserQueryElement} elem
|
||||
*/
|
||||
function convertTypeFilterOnElem(elem) {
|
||||
if (typeof elem.typeFilter === "string") {
|
||||
let typeFilter = elem.typeFilter;
|
||||
if (typeFilter === "const") {
|
||||
typeFilter = "constant";
|
||||
}
|
||||
elem.typeFilter = itemTypeFromName(typeFilter);
|
||||
} else {
|
||||
elem.typeFilter = NO_TYPE_FILTER;
|
||||
}
|
||||
for (const elem2 of elem.generics) {
|
||||
convertTypeFilterOnElem(elem2);
|
||||
}
|
||||
for (const constraints of elem.bindings.values()) {
|
||||
for (const constraint of constraints) {
|
||||
convertTypeFilterOnElem(constraint);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes the user search input and returns an empty `ParsedQuery`.
|
||||
*
|
||||
@@ -2437,8 +1347,7 @@ class DocSearch {
|
||||
continue;
|
||||
}
|
||||
if (!foundStopChar) {
|
||||
/** @type String[] */
|
||||
let extra = [];
|
||||
let extra = EMPTY_STRING_ARRAY;
|
||||
if (isLastElemGeneric(query.elems, parserState)) {
|
||||
extra = [" after ", ">"];
|
||||
} else if (prevIs(parserState, "\"")) {
|
||||
@@ -2515,11 +1424,33 @@ class DocSearch {
|
||||
|
||||
try {
|
||||
parseInput(query, parserState);
|
||||
|
||||
// Scan for invalid type filters, so that we can report the error
|
||||
// outside the search loop.
|
||||
/** @param {rustdoc.ParserQueryElement} elem */
|
||||
const checkTypeFilter = elem => {
|
||||
const ty = itemTypeFromName(elem.typeFilter);
|
||||
if (ty === TY_GENERIC && elem.generics.length !== 0) {
|
||||
throw [
|
||||
"Generic type parameter ",
|
||||
elem.name,
|
||||
" does not accept generic parameters",
|
||||
];
|
||||
}
|
||||
for (const generic of elem.generics) {
|
||||
checkTypeFilter(generic);
|
||||
}
|
||||
for (const constraints of elem.bindings.values()) {
|
||||
for (const constraint of constraints) {
|
||||
checkTypeFilter(constraint);
|
||||
}
|
||||
}
|
||||
};
|
||||
for (const elem of query.elems) {
|
||||
convertTypeFilterOnElem(elem);
|
||||
checkTypeFilter(elem);
|
||||
}
|
||||
for (const elem of query.returned) {
|
||||
convertTypeFilterOnElem(elem);
|
||||
checkTypeFilter(elem);
|
||||
}
|
||||
} catch (err) {
|
||||
query = newParsedQuery(userQuery);
|
||||
@@ -2542,209 +1473,574 @@ class DocSearch {
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} id
|
||||
* @returns {Promise<string|null>}
|
||||
*/
|
||||
async getName(id) {
|
||||
const ni = this.database.getData("name");
|
||||
if (!ni) {
|
||||
return null;
|
||||
}
|
||||
const name = await ni.at(id);
|
||||
return name === undefined || name === null ? null : this.utf8decoder.decode(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} id
|
||||
* @returns {Promise<string|null>}
|
||||
*/
|
||||
async getDesc(id) {
|
||||
const di = this.database.getData("desc");
|
||||
if (!di) {
|
||||
return null;
|
||||
}
|
||||
const desc = await di.at(id);
|
||||
return desc === undefined || desc === null ? null : this.utf8decoder.decode(desc);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} id
|
||||
* @returns {Promise<number|null>}
|
||||
*/
|
||||
async getAliasTarget(id) {
|
||||
const ai = this.database.getData("alias");
|
||||
if (!ai) {
|
||||
return null;
|
||||
}
|
||||
const bytes = await ai.at(id);
|
||||
if (bytes === undefined || bytes === null || bytes.length === 0) {
|
||||
return null;
|
||||
} else {
|
||||
/** @type {string} */
|
||||
const encoded = this.utf8decoder.decode(bytes);
|
||||
/** @type {number|null} */
|
||||
const decoded = JSON.parse(encoded);
|
||||
return decoded;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} id
|
||||
* @returns {Promise<rustdoc.EntryData|null>}
|
||||
*/
|
||||
async getEntryData(id) {
|
||||
const ei = this.database.getData("entry");
|
||||
if (!ei) {
|
||||
return null;
|
||||
}
|
||||
const encoded = this.utf8decoder.decode(await ei.at(id));
|
||||
if (encoded === "" || encoded === undefined || encoded === null) {
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* krate,
|
||||
* ty,
|
||||
* module_path,
|
||||
* exact_module_path,
|
||||
* parent,
|
||||
* deprecated,
|
||||
* associated_item_disambiguator
|
||||
* @type {rustdoc.ArrayWithOptionals<[
|
||||
* number,
|
||||
* rustdoc.ItemType,
|
||||
* number,
|
||||
* number,
|
||||
* number,
|
||||
* number,
|
||||
* ], [string]>}
|
||||
*/
|
||||
const raw = JSON.parse(encoded);
|
||||
return {
|
||||
krate: raw[0],
|
||||
ty: raw[1],
|
||||
modulePath: raw[2] === 0 ? null : raw[2] - 1,
|
||||
exactModulePath: raw[3] === 0 ? null : raw[3] - 1,
|
||||
parent: raw[4] === 0 ? null : raw[4] - 1,
|
||||
deprecated: raw[5] === 1 ? true : false,
|
||||
associatedItemDisambiguator: raw.length === 6 ? null : raw[6],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} id
|
||||
* @returns {Promise<rustdoc.PathData|null>}
|
||||
*/
|
||||
async getPathData(id) {
|
||||
const pi = this.database.getData("path");
|
||||
if (!pi) {
|
||||
return null;
|
||||
}
|
||||
const encoded = this.utf8decoder.decode(await pi.at(id));
|
||||
if (encoded === "" || encoded === undefined || encoded === null) {
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* ty, module_path, exact_module_path, search_unbox, inverted_function_signature_index
|
||||
* @type {rustdoc.ArrayWithOptionals<[rustdoc.ItemType, string], [string|0, 0|1, string]>}
|
||||
*/
|
||||
const raw = JSON.parse(encoded);
|
||||
return {
|
||||
ty: raw[0],
|
||||
modulePath: raw[1],
|
||||
exactModulePath: raw[2] === 0 || raw[2] === undefined ? raw[1] : raw[2],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} id
|
||||
* @returns {Promise<rustdoc.FunctionData|null>}
|
||||
*/
|
||||
async getFunctionData(id) {
|
||||
const fi = this.database.getData("function");
|
||||
if (!fi) {
|
||||
return null;
|
||||
}
|
||||
const encoded = this.utf8decoder.decode(await fi.at(id));
|
||||
if (encoded === "" || encoded === undefined || encoded === null) {
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* function_signature, param_names
|
||||
* @type {[string, string[]]}
|
||||
*/
|
||||
const raw = JSON.parse(encoded);
|
||||
|
||||
const parser = new VlqHexDecoder(raw[0], async functionSearchType => {
|
||||
if (typeof functionSearchType === "number") {
|
||||
return null;
|
||||
}
|
||||
const INPUTS_DATA = 0;
|
||||
const OUTPUT_DATA = 1;
|
||||
/** @type {Promise<rustdoc.FunctionType[]>} */
|
||||
let inputs_;
|
||||
/** @type {Promise<rustdoc.FunctionType[]>} */
|
||||
let output_;
|
||||
if (typeof functionSearchType[INPUTS_DATA] === "number") {
|
||||
inputs_ = Promise.all([
|
||||
this.buildItemSearchType(functionSearchType[INPUTS_DATA]),
|
||||
]);
|
||||
} else {
|
||||
// @ts-ignore
|
||||
inputs_ = this.buildItemSearchTypeAll(functionSearchType[INPUTS_DATA]);
|
||||
}
|
||||
if (functionSearchType.length > 1) {
|
||||
if (typeof functionSearchType[OUTPUT_DATA] === "number") {
|
||||
output_ = Promise.all([
|
||||
this.buildItemSearchType(functionSearchType[OUTPUT_DATA]),
|
||||
]);
|
||||
} else {
|
||||
// @ts-expect-error
|
||||
output_ = this.buildItemSearchTypeAll(functionSearchType[OUTPUT_DATA]);
|
||||
}
|
||||
} else {
|
||||
output_ = Promise.resolve(EMPTY_GENERICS_ARRAY);
|
||||
}
|
||||
/** @type {Promise<rustdoc.FunctionType[]>[]} */
|
||||
const where_clause_ = [];
|
||||
const l = functionSearchType.length;
|
||||
for (let i = 2; i < l; ++i) {
|
||||
where_clause_.push(typeof functionSearchType[i] === "number"
|
||||
// @ts-expect-error
|
||||
? Promise.all([this.buildItemSearchType(functionSearchType[i])])
|
||||
// @ts-expect-error
|
||||
: this.buildItemSearchTypeAll(functionSearchType[i]),
|
||||
);
|
||||
}
|
||||
const [inputs, output, where_clause] = await Promise.all([
|
||||
inputs_,
|
||||
output_,
|
||||
Promise.all(where_clause_),
|
||||
]);
|
||||
return {
|
||||
inputs, output, where_clause,
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
functionSignature: await parser.next(),
|
||||
paramNames: raw[1],
|
||||
elemCount: parser.elemCount,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} id
|
||||
* @returns {Promise<rustdoc.TypeData|null>}
|
||||
*/
|
||||
async getTypeData(id) {
|
||||
const ti = this.database.getData("type");
|
||||
if (!ti) {
|
||||
return null;
|
||||
}
|
||||
const encoded = this.utf8decoder.decode(await ti.at(id));
|
||||
if (encoded === "" || encoded === undefined || encoded === null) {
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* function_signature, param_names
|
||||
* @type {[string, number] | [number] | [string] | [] | null}
|
||||
*/
|
||||
const raw = JSON.parse(encoded);
|
||||
|
||||
if (!raw || raw.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let searchUnbox = false;
|
||||
const invertedFunctionSignatureIndex = [];
|
||||
|
||||
if (typeof raw[0] === "string") {
|
||||
if (raw[1]) {
|
||||
searchUnbox = true;
|
||||
}
|
||||
// the inverted function signature index is a list of bitmaps,
|
||||
// by number of types that appear in the function
|
||||
let i = 0;
|
||||
const pb = makeUint8ArrayFromBase64(raw[0]);
|
||||
const l = pb.length;
|
||||
while (i < l) {
|
||||
if (pb[i] === 0) {
|
||||
invertedFunctionSignatureIndex.push(RoaringBitmap.empty());
|
||||
i += 1;
|
||||
} else {
|
||||
const bitmap = new RoaringBitmap(pb, i);
|
||||
i += bitmap.consumed_len_bytes;
|
||||
invertedFunctionSignatureIndex.push(bitmap);
|
||||
}
|
||||
}
|
||||
} else if (raw[0]) {
|
||||
searchUnbox = true;
|
||||
}
|
||||
|
||||
return { searchUnbox, invertedFunctionSignatureIndex };
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {Promise<string[]>}
|
||||
*/
|
||||
async getCrateNameList() {
|
||||
const crateNames = this.database.getData("crateNames");
|
||||
if (!crateNames) {
|
||||
return [];
|
||||
}
|
||||
const l = crateNames.length;
|
||||
const names = [];
|
||||
for (let i = 0; i < l; ++i) {
|
||||
names.push(crateNames.at(i).then(name => {
|
||||
if (name === undefined) {
|
||||
return "";
|
||||
}
|
||||
return this.utf8decoder.decode(name);
|
||||
}));
|
||||
}
|
||||
return Promise.all(names);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} id non-negative generic index
|
||||
* @returns {Promise<stringdex.RoaringBitmap[]>}
|
||||
*/
|
||||
async getGenericInvertedIndex(id) {
|
||||
const gii = this.database.getData("generic_inverted_index");
|
||||
if (!gii) {
|
||||
return [];
|
||||
}
|
||||
const pb = await gii.at(id);
|
||||
if (pb === undefined || pb === null || pb.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const invertedFunctionSignatureIndex = [];
|
||||
// the inverted function signature index is a list of bitmaps,
|
||||
// by number of types that appear in the function
|
||||
let i = 0;
|
||||
const l = pb.length;
|
||||
while (i < l) {
|
||||
if (pb[i] === 0) {
|
||||
invertedFunctionSignatureIndex.push(RoaringBitmap.empty());
|
||||
i += 1;
|
||||
} else {
|
||||
const bitmap = new RoaringBitmap(pb, i);
|
||||
i += bitmap.consumed_len_bytes;
|
||||
invertedFunctionSignatureIndex.push(bitmap);
|
||||
}
|
||||
}
|
||||
return invertedFunctionSignatureIndex;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} id
|
||||
* @returns {Promise<rustdoc.Row?>}
|
||||
*/
|
||||
async getRow(id) {
|
||||
const [name_, entry, path, type] = await Promise.all([
|
||||
this.getName(id),
|
||||
this.getEntryData(id),
|
||||
this.getPathData(id),
|
||||
this.getFunctionData(id),
|
||||
]);
|
||||
if (!entry && !path) {
|
||||
return null;
|
||||
}
|
||||
const [
|
||||
moduleName,
|
||||
modulePathData,
|
||||
exactModuleName,
|
||||
exactModulePathData,
|
||||
] = await Promise.all([
|
||||
entry && entry.modulePath !== null ? this.getName(entry.modulePath) : null,
|
||||
entry && entry.modulePath !== null ? this.getPathData(entry.modulePath) : null,
|
||||
entry && entry.exactModulePath !== null ?
|
||||
this.getName(entry.exactModulePath) :
|
||||
null,
|
||||
entry && entry.exactModulePath !== null ?
|
||||
this.getPathData(entry.exactModulePath) :
|
||||
null,
|
||||
]);
|
||||
const name = name_ === null ? "" : name_;
|
||||
const normalizedName = (name.indexOf("_") === -1 ?
|
||||
name :
|
||||
name.replace(/_/g, "")).toLowerCase();
|
||||
const modulePath = modulePathData === null || moduleName === null ? "" :
|
||||
(modulePathData.modulePath === "" ?
|
||||
moduleName :
|
||||
`${modulePathData.modulePath}::${moduleName}`);
|
||||
const [parentName, parentPath] = entry !== null && entry.parent !== null ?
|
||||
await Promise.all([this.getName(entry.parent), this.getPathData(entry.parent)]) :
|
||||
[null, null];
|
||||
return {
|
||||
id,
|
||||
crate: entry ? nonnull(await this.getName(entry.krate)) : "",
|
||||
ty: entry ? entry.ty : nonnull(path).ty,
|
||||
name,
|
||||
normalizedName,
|
||||
modulePath,
|
||||
exactModulePath: exactModulePathData === null || exactModuleName === null ? modulePath :
|
||||
(exactModulePathData.exactModulePath === "" ?
|
||||
exactModuleName :
|
||||
`${exactModulePathData.exactModulePath}::${exactModuleName}`),
|
||||
entry,
|
||||
path,
|
||||
type,
|
||||
deprecated: entry ? entry.deprecated : false,
|
||||
parent: parentName !== null && parentPath !== null ?
|
||||
{ name: parentName, path: parentPath } :
|
||||
null,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a list of RawFunctionType / ID to object-based FunctionType.
|
||||
*
|
||||
* Crates often have lots of functions in them, and it's common to have a large number of
|
||||
* functions that operate on a small set of data types, so the search index compresses them
|
||||
* by encoding function parameter and return types as indexes into an array of names.
|
||||
*
|
||||
* Even when a general-purpose compression algorithm is used, this is still a win.
|
||||
* I checked. https://github.com/rust-lang/rust/pull/98475#issue-1284395985
|
||||
*
|
||||
* The format for individual function types is encoded in
|
||||
* librustdoc/html/render/mod.rs: impl Serialize for RenderType
|
||||
*
|
||||
* @param {null|Array<rustdoc.RawFunctionType>} types
|
||||
*
|
||||
* @return {Promise<Array<rustdoc.FunctionType>>}
|
||||
*/
|
||||
async buildItemSearchTypeAll(types) {
|
||||
return types && types.length > 0 ?
|
||||
await Promise.all(types.map(type => this.buildItemSearchType(type))) :
|
||||
EMPTY_GENERICS_ARRAY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a single type.
|
||||
*
|
||||
* @param {rustdoc.RawFunctionType} type
|
||||
* @return {Promise<rustdoc.FunctionType>}
|
||||
*/
|
||||
async buildItemSearchType(type) {
|
||||
const PATH_INDEX_DATA = 0;
|
||||
const GENERICS_DATA = 1;
|
||||
const BINDINGS_DATA = 2;
|
||||
let id, generics;
|
||||
/**
|
||||
* @type {Map<number, rustdoc.FunctionType[]>}
|
||||
*/
|
||||
let bindings;
|
||||
if (typeof type === "number") {
|
||||
id = type;
|
||||
generics = EMPTY_GENERICS_ARRAY;
|
||||
bindings = EMPTY_BINDINGS_MAP;
|
||||
} else {
|
||||
id = type[PATH_INDEX_DATA];
|
||||
generics = await this.buildItemSearchTypeAll(type[GENERICS_DATA]);
|
||||
if (type[BINDINGS_DATA] && type[BINDINGS_DATA].length > 0) {
|
||||
bindings = new Map((await Promise.all(type[BINDINGS_DATA].map(
|
||||
/**
|
||||
* @param {[rustdoc.RawFunctionType, rustdoc.RawFunctionType[]]} binding
|
||||
* @returns {Promise<[number, rustdoc.FunctionType[]][]>}
|
||||
*/
|
||||
async binding => {
|
||||
const [assocType, constraints] = binding;
|
||||
// Associated type constructors are represented sloppily in rustdoc's
|
||||
// type search, to make the engine simpler.
|
||||
//
|
||||
// MyType<Output<T>=Result<T>> is equivalent to MyType<Output<Result<T>>=T>
|
||||
// and both are, essentially
|
||||
// MyType<Output=(T, Result<T>)>, except the tuple isn't actually there.
|
||||
// It's more like the value of a type binding is naturally an array,
|
||||
// which rustdoc calls "constraints".
|
||||
//
|
||||
// As a result, the key should never have generics on it.
|
||||
const [k, v] = await Promise.all([
|
||||
this.buildItemSearchType(assocType).then(t => t.id),
|
||||
this.buildItemSearchTypeAll(constraints),
|
||||
]);
|
||||
return k === null ? EMPTY_BINDINGS_ARRAY : [[k, v]];
|
||||
},
|
||||
))).flat());
|
||||
} else {
|
||||
bindings = EMPTY_BINDINGS_MAP;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @type {rustdoc.FunctionType}
|
||||
*/
|
||||
let result;
|
||||
if (id < 0) {
|
||||
// types less than 0 are generic parameters
|
||||
// the actual names of generic parameters aren't stored, since they aren't API
|
||||
result = {
|
||||
id,
|
||||
name: "",
|
||||
ty: TY_GENERIC,
|
||||
path: null,
|
||||
exactPath: null,
|
||||
generics,
|
||||
bindings,
|
||||
unboxFlag: true,
|
||||
};
|
||||
} else if (id === 0) {
|
||||
// `0` is used as a sentinel because it's fewer bytes than `null`
|
||||
result = {
|
||||
id: null,
|
||||
name: "",
|
||||
ty: TY_GENERIC,
|
||||
path: null,
|
||||
exactPath: null,
|
||||
generics,
|
||||
bindings,
|
||||
unboxFlag: true,
|
||||
};
|
||||
} else {
|
||||
const [name, path, type] = await Promise.all([
|
||||
this.getName(id - 1),
|
||||
this.getPathData(id - 1),
|
||||
this.getTypeData(id - 1),
|
||||
]);
|
||||
if (path === undefined || path === null || type === undefined || type === null) {
|
||||
return {
|
||||
id: null,
|
||||
name: "",
|
||||
ty: TY_GENERIC,
|
||||
path: null,
|
||||
exactPath: null,
|
||||
generics,
|
||||
bindings,
|
||||
unboxFlag: true,
|
||||
};
|
||||
}
|
||||
result = {
|
||||
id: id - 1,
|
||||
name,
|
||||
ty: path.ty,
|
||||
path: path.modulePath,
|
||||
exactPath: path.exactModulePath === null ? path.modulePath : path.exactModulePath,
|
||||
generics,
|
||||
bindings,
|
||||
unboxFlag: type.searchUnbox,
|
||||
};
|
||||
}
|
||||
const cr = this.TYPES_POOL.get(result.id);
|
||||
if (cr) {
|
||||
// Shallow equality check. Since this function is used
|
||||
// to construct every type object, this should be mostly
|
||||
// equivalent to a deep equality check, except if there's
|
||||
// a conflict, we don't keep the old one around, so it's
|
||||
// not a fully precise implementation of hashcons.
|
||||
if (cr.generics.length === result.generics.length &&
|
||||
cr.generics !== result.generics &&
|
||||
cr.generics.every((x, i) => result.generics[i] === x)
|
||||
) {
|
||||
result.generics = cr.generics;
|
||||
}
|
||||
if (cr.bindings.size === result.bindings.size && cr.bindings !== result.bindings) {
|
||||
let ok = true;
|
||||
for (const [k, v] of cr.bindings.entries()) {
|
||||
const v2 = result.bindings.get(k);
|
||||
if (!v2) {
|
||||
ok = false;
|
||||
break;
|
||||
}
|
||||
if (v !== v2 && v.length === v2.length && v.every((x, i) => v2[i] === x)) {
|
||||
result.bindings.set(k, v);
|
||||
} else if (v !== v2) {
|
||||
ok = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (ok) {
|
||||
result.bindings = cr.bindings;
|
||||
}
|
||||
}
|
||||
if (cr.ty === result.ty && cr.path === result.path
|
||||
&& cr.bindings === result.bindings && cr.generics === result.generics
|
||||
&& cr.ty === result.ty && cr.name === result.name
|
||||
&& cr.unboxFlag === result.unboxFlag
|
||||
) {
|
||||
return cr;
|
||||
}
|
||||
}
|
||||
this.TYPES_POOL.set(result.id, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes the parsed query and builds a {ResultsTable}.
|
||||
*
|
||||
* @param {rustdoc.ParsedQuery<rustdoc.ParserQueryElement>} origParsedQuery
|
||||
* @param {rustdoc.ParsedQuery<rustdoc.ParserQueryElement>} parsedQuery
|
||||
* - The parsed user query
|
||||
* @param {Object} filterCrates - Crate to search in if defined
|
||||
* @param {string} currentCrate - Current crate, to rank results from this crate higher
|
||||
*
|
||||
* @return {Promise<rustdoc.ResultsTable>}
|
||||
*/
|
||||
async execQuery(origParsedQuery, filterCrates, currentCrate) {
|
||||
/** @type {rustdoc.Results} */
|
||||
const results_others = new Map(),
|
||||
/** @type {rustdoc.Results} */
|
||||
results_in_args = new Map(),
|
||||
/** @type {rustdoc.Results} */
|
||||
results_returned = new Map();
|
||||
|
||||
/** @type {rustdoc.ParsedQuery<rustdoc.QueryElement>} */
|
||||
// @ts-expect-error
|
||||
const parsedQuery = origParsedQuery;
|
||||
|
||||
async execQuery(parsedQuery, filterCrates, currentCrate) {
|
||||
const queryLen =
|
||||
parsedQuery.elems.reduce((acc, next) => acc + next.pathLast.length, 0) +
|
||||
parsedQuery.returned.reduce((acc, next) => acc + next.pathLast.length, 0);
|
||||
const maxEditDistance = Math.floor(queryLen / 3);
|
||||
// We reinitialize the `FOUND_ALIASES` map.
|
||||
this.FOUND_ALIASES.clear();
|
||||
|
||||
/**
|
||||
* @type {Map<string, number>}
|
||||
* @param {rustdoc.Row} item
|
||||
* @returns {[string, string, string]}
|
||||
*/
|
||||
const genericSymbols = new Map();
|
||||
|
||||
/**
|
||||
* Convert names to ids in parsed query elements.
|
||||
* This is not used for the "In Names" tab, but is used for the
|
||||
* "In Params", "In Returns", and "In Function Signature" tabs.
|
||||
*
|
||||
* If there is no matching item, but a close-enough match, this
|
||||
* function also that correction.
|
||||
*
|
||||
* See `buildTypeMapIndex` for more information.
|
||||
*
|
||||
* @param {rustdoc.QueryElement} elem
|
||||
* @param {boolean=} isAssocType
|
||||
*/
|
||||
const convertNameToId = (elem, isAssocType) => {
|
||||
const loweredName = elem.pathLast.toLowerCase();
|
||||
if (this.typeNameIdMap.has(loweredName) &&
|
||||
// @ts-expect-error
|
||||
(isAssocType || !this.typeNameIdMap.get(loweredName).assocOnly)) {
|
||||
// @ts-expect-error
|
||||
elem.id = this.typeNameIdMap.get(loweredName).id;
|
||||
} else if (!parsedQuery.literalSearch) {
|
||||
let match = null;
|
||||
let matchDist = maxEditDistance + 1;
|
||||
let matchName = "";
|
||||
for (const [name, { id, assocOnly }] of this.typeNameIdMap) {
|
||||
const dist = Math.min(
|
||||
editDistance(name, loweredName, maxEditDistance),
|
||||
editDistance(name, elem.normalizedPathLast, maxEditDistance),
|
||||
);
|
||||
if (dist <= matchDist && dist <= maxEditDistance &&
|
||||
(isAssocType || !assocOnly)) {
|
||||
if (dist === matchDist && matchName > name) {
|
||||
continue;
|
||||
}
|
||||
match = id;
|
||||
matchDist = dist;
|
||||
matchName = name;
|
||||
}
|
||||
}
|
||||
if (match !== null) {
|
||||
parsedQuery.correction = matchName;
|
||||
}
|
||||
elem.id = match;
|
||||
}
|
||||
if ((elem.id === null && parsedQuery.totalElems > 1 && elem.typeFilter === -1
|
||||
&& elem.generics.length === 0 && elem.bindings.size === 0)
|
||||
|| elem.typeFilter === TY_GENERIC) {
|
||||
const id = genericSymbols.get(elem.normalizedPathLast);
|
||||
if (id !== undefined) {
|
||||
elem.id = id;
|
||||
} else {
|
||||
elem.id = -(genericSymbols.size + 1);
|
||||
genericSymbols.set(elem.normalizedPathLast, elem.id);
|
||||
}
|
||||
if (elem.typeFilter === -1 && elem.normalizedPathLast.length >= 3) {
|
||||
// Silly heuristic to catch if the user probably meant
|
||||
// to not write a generic parameter. We don't use it,
|
||||
// just bring it up.
|
||||
const maxPartDistance = Math.floor(elem.normalizedPathLast.length / 3);
|
||||
let matchDist = maxPartDistance + 1;
|
||||
let matchName = "";
|
||||
for (const name of this.typeNameIdMap.keys()) {
|
||||
const dist = editDistance(
|
||||
name,
|
||||
elem.normalizedPathLast,
|
||||
maxPartDistance,
|
||||
);
|
||||
if (dist <= matchDist && dist <= maxPartDistance) {
|
||||
if (dist === matchDist && matchName > name) {
|
||||
continue;
|
||||
}
|
||||
matchDist = dist;
|
||||
matchName = name;
|
||||
}
|
||||
}
|
||||
if (matchName !== "") {
|
||||
parsedQuery.proposeCorrectionFrom = elem.name;
|
||||
parsedQuery.proposeCorrectionTo = matchName;
|
||||
}
|
||||
}
|
||||
elem.typeFilter = TY_GENERIC;
|
||||
}
|
||||
if (elem.generics.length > 0 && elem.typeFilter === TY_GENERIC) {
|
||||
// Rust does not have HKT
|
||||
parsedQuery.error = [
|
||||
"Generic type parameter ",
|
||||
elem.name,
|
||||
" does not accept generic parameters",
|
||||
];
|
||||
}
|
||||
for (const elem2 of elem.generics) {
|
||||
convertNameToId(elem2);
|
||||
}
|
||||
elem.bindings = new Map(Array.from(elem.bindings.entries())
|
||||
.map(entry => {
|
||||
const [name, constraints] = entry;
|
||||
// @ts-expect-error
|
||||
if (!this.typeNameIdMap.has(name)) {
|
||||
parsedQuery.error = [
|
||||
"Type parameter ",
|
||||
// @ts-expect-error
|
||||
name,
|
||||
" does not exist",
|
||||
];
|
||||
return [0, []];
|
||||
}
|
||||
for (const elem2 of constraints) {
|
||||
convertNameToId(elem2, false);
|
||||
}
|
||||
|
||||
// @ts-expect-error
|
||||
return [this.typeNameIdMap.get(name).id, constraints];
|
||||
}),
|
||||
);
|
||||
};
|
||||
|
||||
for (const elem of parsedQuery.elems) {
|
||||
convertNameToId(elem, false);
|
||||
this.buildFunctionTypeFingerprint(elem, parsedQuery.typeFingerprint);
|
||||
}
|
||||
for (const elem of parsedQuery.returned) {
|
||||
convertNameToId(elem, false);
|
||||
this.buildFunctionTypeFingerprint(elem, parsedQuery.typeFingerprint);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates the query results.
|
||||
*
|
||||
* @param {Array<rustdoc.ResultObject>} results_in_args
|
||||
* @param {Array<rustdoc.ResultObject>} results_returned
|
||||
* @param {Array<rustdoc.ResultObject>} results_others
|
||||
* @param {rustdoc.ParsedQuery<rustdoc.QueryElement>} parsedQuery
|
||||
*
|
||||
* @return {rustdoc.ResultsTable}
|
||||
*/
|
||||
function createQueryResults(
|
||||
results_in_args,
|
||||
results_returned,
|
||||
results_others,
|
||||
parsedQuery) {
|
||||
return {
|
||||
"in_args": results_in_args,
|
||||
"returned": results_returned,
|
||||
"others": results_others,
|
||||
"query": parsedQuery,
|
||||
};
|
||||
}
|
||||
|
||||
// @ts-expect-error
|
||||
const buildHrefAndPath = item => {
|
||||
let displayPath;
|
||||
let href;
|
||||
if (item.is_alias) {
|
||||
this.FOUND_ALIASES.add(item.word);
|
||||
item = item.original;
|
||||
}
|
||||
const type = itemTypes[item.ty];
|
||||
const name = item.name;
|
||||
let path = item.path;
|
||||
let exactPath = item.exactPath;
|
||||
let path = item.modulePath;
|
||||
let exactPath = item.exactModulePath;
|
||||
|
||||
if (type === "mod") {
|
||||
displayPath = path + "::";
|
||||
href = this.rootPath + path.replace(/::/g, "/") + "/" +
|
||||
name + "/index.html";
|
||||
} else if (type === "import") {
|
||||
displayPath = item.path + "::";
|
||||
href = this.rootPath + item.path.replace(/::/g, "/") +
|
||||
displayPath = item.modulePath + "::";
|
||||
href = this.rootPath + item.modulePath.replace(/::/g, "/") +
|
||||
"/index.html#reexport." + name;
|
||||
} else if (type === "primitive" || type === "keyword") {
|
||||
displayPath = "";
|
||||
@@ -2754,13 +2050,13 @@ class DocSearch {
|
||||
} else if (type === "externcrate") {
|
||||
displayPath = "";
|
||||
href = this.rootPath + name + "/index.html";
|
||||
} else if (item.parent !== undefined) {
|
||||
} else if (item.parent) {
|
||||
const myparent = item.parent;
|
||||
let anchor = type + "." + name;
|
||||
const parentType = itemTypes[myparent.ty];
|
||||
const parentType = itemTypes[myparent.path.ty];
|
||||
let pageType = parentType;
|
||||
let pageName = myparent.name;
|
||||
exactPath = `${myparent.exactPath}::${myparent.name}`;
|
||||
exactPath = `${myparent.path.exactModulePath}::${myparent.name}`;
|
||||
|
||||
if (parentType === "primitive") {
|
||||
displayPath = myparent.name + "::";
|
||||
@@ -2768,9 +2064,9 @@ class DocSearch {
|
||||
} else if (type === "structfield" && parentType === "variant") {
|
||||
// Structfields belonging to variants are special: the
|
||||
// final path element is the enum name.
|
||||
const enumNameIdx = item.path.lastIndexOf("::");
|
||||
const enumName = item.path.substr(enumNameIdx + 2);
|
||||
path = item.path.substr(0, enumNameIdx);
|
||||
const enumNameIdx = item.modulePath.lastIndexOf("::");
|
||||
const enumName = item.modulePath.substr(enumNameIdx + 2);
|
||||
path = item.modulePath.substr(0, enumNameIdx);
|
||||
displayPath = path + "::" + enumName + "::" + myparent.name + "::";
|
||||
anchor = "variant." + myparent.name + ".field." + name;
|
||||
pageType = "enum";
|
||||
@@ -2778,16 +2074,16 @@ class DocSearch {
|
||||
} else {
|
||||
displayPath = path + "::" + myparent.name + "::";
|
||||
}
|
||||
if (item.implDisambiguator !== null) {
|
||||
anchor = item.implDisambiguator + "/" + anchor;
|
||||
if (item.entry && item.entry.associatedItemDisambiguator !== null) {
|
||||
anchor = item.entry.associatedItemDisambiguator + "/" + anchor;
|
||||
}
|
||||
href = this.rootPath + path.replace(/::/g, "/") +
|
||||
"/" + pageType +
|
||||
"." + pageName +
|
||||
".html#" + anchor;
|
||||
} else {
|
||||
displayPath = item.path + "::";
|
||||
href = this.rootPath + item.path.replace(/::/g, "/") +
|
||||
displayPath = item.modulePath + "::";
|
||||
href = this.rootPath + item.modulePath.replace(/::/g, "/") +
|
||||
"/" + type + "." + name + ".html";
|
||||
}
|
||||
return [displayPath, href, `${exactPath}::${name}`];
|
||||
@@ -2806,74 +2102,6 @@ class DocSearch {
|
||||
return tmp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add extra data to result objects, and filter items that have been
|
||||
* marked for removal.
|
||||
*
|
||||
* @param {rustdoc.ResultObject[]} results
|
||||
* @param {"sig"|"elems"|"returned"|null} typeInfo
|
||||
* @returns {rustdoc.ResultObject[]}
|
||||
*/
|
||||
const transformResults = (results, typeInfo) => {
|
||||
const duplicates = new Set();
|
||||
const out = [];
|
||||
|
||||
for (const result of results) {
|
||||
if (result.id !== -1) {
|
||||
const res = buildHrefAndPath(this.searchIndex[result.id]);
|
||||
// many of these properties don't strictly need to be
|
||||
// copied over, but copying them over satisfies tsc,
|
||||
// and hopefully plays nice with the shape optimization
|
||||
// of the browser engine.
|
||||
/** @type {rustdoc.ResultObject} */
|
||||
const obj = Object.assign({
|
||||
parent: result.parent,
|
||||
type: result.type,
|
||||
dist: result.dist,
|
||||
path_dist: result.path_dist,
|
||||
index: result.index,
|
||||
desc: result.desc,
|
||||
item: result.item,
|
||||
displayPath: pathSplitter(res[0]),
|
||||
fullPath: "",
|
||||
href: "",
|
||||
displayTypeSignature: null,
|
||||
}, this.searchIndex[result.id]);
|
||||
|
||||
// To be sure than it some items aren't considered as duplicate.
|
||||
obj.fullPath = res[2] + "|" + obj.ty;
|
||||
|
||||
if (duplicates.has(obj.fullPath)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Exports are specifically not shown if the items they point at
|
||||
// are already in the results.
|
||||
if (obj.ty === TY_IMPORT && duplicates.has(res[2])) {
|
||||
continue;
|
||||
}
|
||||
if (duplicates.has(res[2] + "|" + TY_IMPORT)) {
|
||||
continue;
|
||||
}
|
||||
duplicates.add(obj.fullPath);
|
||||
duplicates.add(res[2]);
|
||||
|
||||
if (typeInfo !== null) {
|
||||
obj.displayTypeSignature =
|
||||
// @ts-expect-error
|
||||
this.formatDisplayTypeSignature(obj, typeInfo);
|
||||
}
|
||||
|
||||
obj.href = res[1];
|
||||
out.push(obj);
|
||||
if (out.length >= MAX_RESULTS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add extra data to result objects, and filter items that have been
|
||||
* marked for removal.
|
||||
@@ -2883,9 +2111,11 @@ class DocSearch {
|
||||
*
|
||||
* @param {rustdoc.ResultObject} obj
|
||||
* @param {"sig"|"elems"|"returned"|null} typeInfo
|
||||
* @param {rustdoc.QueryElement[]} elems
|
||||
* @param {rustdoc.QueryElement[]} returned
|
||||
* @returns {Promise<rustdoc.DisplayTypeSignature>}
|
||||
*/
|
||||
this.formatDisplayTypeSignature = async(obj, typeInfo) => {
|
||||
const formatDisplayTypeSignature = async(obj, typeInfo, elems, returned) => {
|
||||
const objType = obj.type;
|
||||
if (!objType) {
|
||||
return {type: [], mappedNames: new Map(), whereClause: new Map()};
|
||||
@@ -2897,13 +2127,13 @@ class DocSearch {
|
||||
if (typeInfo !== "elems" && typeInfo !== "returned") {
|
||||
fnInputs = unifyFunctionTypes(
|
||||
objType.inputs,
|
||||
parsedQuery.elems,
|
||||
elems,
|
||||
objType.where_clause,
|
||||
null,
|
||||
mgensScratch => {
|
||||
fnOutput = unifyFunctionTypes(
|
||||
objType.output,
|
||||
parsedQuery.returned,
|
||||
returned,
|
||||
objType.where_clause,
|
||||
mgensScratch,
|
||||
mgensOut => {
|
||||
@@ -2917,10 +2147,9 @@ class DocSearch {
|
||||
0,
|
||||
);
|
||||
} else {
|
||||
const arr = typeInfo === "elems" ? objType.inputs : objType.output;
|
||||
const highlighted = unifyFunctionTypes(
|
||||
arr,
|
||||
parsedQuery.elems,
|
||||
typeInfo === "elems" ? objType.inputs : objType.output,
|
||||
typeInfo === "elems" ? elems : returned,
|
||||
objType.where_clause,
|
||||
null,
|
||||
mgensOut => {
|
||||
@@ -2969,15 +2198,15 @@ class DocSearch {
|
||||
}
|
||||
};
|
||||
|
||||
parsedQuery.elems.forEach(remapQuery);
|
||||
parsedQuery.returned.forEach(remapQuery);
|
||||
elems.forEach(remapQuery);
|
||||
returned.forEach(remapQuery);
|
||||
|
||||
/**
|
||||
* Write text to a highlighting array.
|
||||
* Index 0 is not highlighted, index 1 is highlighted,
|
||||
* index 2 is not highlighted, etc.
|
||||
*
|
||||
* @param {{name?: string, highlighted?: boolean}} fnType - input
|
||||
* @param {{name: string|null, highlighted?: boolean}} fnType - input
|
||||
* @param {string[]} result
|
||||
*/
|
||||
const pushText = (fnType, result) => {
|
||||
@@ -3004,8 +2233,9 @@ class DocSearch {
|
||||
*
|
||||
* @param {rustdoc.HighlightedFunctionType} fnType - input
|
||||
* @param {string[]} result
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
const writeHof = (fnType, result) => {
|
||||
const writeHof = async(fnType, result) => {
|
||||
const hofOutput = fnType.bindings.get(this.typeNameIdOfOutput) || [];
|
||||
const hofInputs = fnType.generics;
|
||||
pushText(fnType, result);
|
||||
@@ -3016,7 +2246,7 @@ class DocSearch {
|
||||
pushText({ name: ", ", highlighted: false }, result);
|
||||
}
|
||||
needsComma = true;
|
||||
writeFn(fnType, result);
|
||||
await writeFn(fnType, result);
|
||||
}
|
||||
pushText({
|
||||
name: hofOutput.length === 0 ? ")" : ") -> ",
|
||||
@@ -3031,7 +2261,7 @@ class DocSearch {
|
||||
pushText({ name: ", ", highlighted: false }, result);
|
||||
}
|
||||
needsComma = true;
|
||||
writeFn(fnType, result);
|
||||
await writeFn(fnType, result);
|
||||
}
|
||||
if (hofOutput.length > 1) {
|
||||
pushText({name: ")", highlighted: false}, result);
|
||||
@@ -3044,8 +2274,9 @@ class DocSearch {
|
||||
*
|
||||
* @param {rustdoc.HighlightedFunctionType} fnType
|
||||
* @param {string[]} result
|
||||
* @returns {Promise<boolean>}
|
||||
*/
|
||||
const writeSpecialPrimitive = (fnType, result) => {
|
||||
const writeSpecialPrimitive = async(fnType, result) => {
|
||||
if (fnType.id === this.typeNameIdOfArray || fnType.id === this.typeNameIdOfSlice ||
|
||||
fnType.id === this.typeNameIdOfTuple || fnType.id === this.typeNameIdOfUnit) {
|
||||
const [ob, sb] =
|
||||
@@ -3054,7 +2285,7 @@ class DocSearch {
|
||||
["[", "]"] :
|
||||
["(", ")"];
|
||||
pushText({ name: ob, highlighted: fnType.highlighted }, result);
|
||||
onEachBtwn(
|
||||
await onEachBtwnAsync(
|
||||
fnType.generics,
|
||||
nested => writeFn(nested, result),
|
||||
// @ts-expect-error
|
||||
@@ -3065,11 +2296,11 @@ class DocSearch {
|
||||
} else if (fnType.id === this.typeNameIdOfReference) {
|
||||
pushText({ name: "&", highlighted: fnType.highlighted }, result);
|
||||
let prevHighlighted = false;
|
||||
onEachBtwn(
|
||||
await onEachBtwnAsync(
|
||||
fnType.generics,
|
||||
value => {
|
||||
async value => {
|
||||
prevHighlighted = !!value.highlighted;
|
||||
writeFn(value, result);
|
||||
await writeFn(value, result);
|
||||
},
|
||||
// @ts-expect-error
|
||||
value => pushText({
|
||||
@@ -3078,8 +2309,16 @@ class DocSearch {
|
||||
}, result),
|
||||
);
|
||||
return true;
|
||||
} else if (fnType.id === this.typeNameIdOfFn) {
|
||||
writeHof(fnType, result);
|
||||
} else if (
|
||||
fnType.id === this.typeNameIdOfFn ||
|
||||
fnType.id === this.typeNameIdOfFnMut ||
|
||||
fnType.id === this.typeNameIdOfFnOnce ||
|
||||
fnType.id === this.typeNameIdOfFnPtr
|
||||
) {
|
||||
await writeHof(fnType, result);
|
||||
return true;
|
||||
} else if (fnType.id === this.typeNameIdOfNever) {
|
||||
pushText({ name: "!", highlighted: fnType.highlighted }, result);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@@ -3091,8 +2330,9 @@ class DocSearch {
|
||||
*
|
||||
* @param {rustdoc.HighlightedFunctionType} fnType
|
||||
* @param {string[]} result
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
const writeFn = (fnType, result) => {
|
||||
const writeFn = async(fnType, result) => {
|
||||
if (fnType.id !== null && fnType.id < 0) {
|
||||
if (fnParamNames[-1 - fnType.id] === "") {
|
||||
// Normally, there's no need to shown an unhighlighted
|
||||
@@ -3101,7 +2341,7 @@ class DocSearch {
|
||||
fnType.generics :
|
||||
objType.where_clause[-1 - fnType.id];
|
||||
for (const nested of generics) {
|
||||
writeFn(nested, result);
|
||||
await writeFn(nested, result);
|
||||
}
|
||||
return;
|
||||
} else if (mgens) {
|
||||
@@ -3120,7 +2360,7 @@ class DocSearch {
|
||||
}, result);
|
||||
/** @type{string[]} */
|
||||
const where = [];
|
||||
onEachBtwn(
|
||||
await onEachBtwnAsync(
|
||||
fnType.generics,
|
||||
nested => writeFn(nested, where),
|
||||
// @ts-expect-error
|
||||
@@ -3131,32 +2371,61 @@ class DocSearch {
|
||||
}
|
||||
} else {
|
||||
if (fnType.ty === TY_PRIMITIVE) {
|
||||
if (writeSpecialPrimitive(fnType, result)) {
|
||||
if (await writeSpecialPrimitive(fnType, result)) {
|
||||
return;
|
||||
}
|
||||
} else if (fnType.ty === TY_TRAIT && (
|
||||
fnType.id === this.typeNameIdOfFn ||
|
||||
fnType.id === this.typeNameIdOfFnMut ||
|
||||
fnType.id === this.typeNameIdOfFnOnce)) {
|
||||
writeHof(fnType, result);
|
||||
fnType.id === this.typeNameIdOfFnMut ||
|
||||
fnType.id === this.typeNameIdOfFnOnce ||
|
||||
fnType.id === this.typeNameIdOfFnPtr
|
||||
)) {
|
||||
await writeHof(fnType, result);
|
||||
return;
|
||||
} else if (fnType.name === "" &&
|
||||
fnType.bindings.size === 0 &&
|
||||
fnType.generics.length !== 0
|
||||
) {
|
||||
pushText({ name: "impl ", highlighted: false }, result);
|
||||
if (fnType.generics.length > 1) {
|
||||
pushText({ name: "(", highlighted: false }, result);
|
||||
}
|
||||
await onEachBtwnAsync(
|
||||
fnType.generics,
|
||||
value => writeFn(value, result),
|
||||
// @ts-expect-error
|
||||
() => pushText({ name: ", ", highlighted: false }, result),
|
||||
);
|
||||
if (fnType.generics.length > 1) {
|
||||
pushText({ name: ")", highlighted: false }, result);
|
||||
}
|
||||
return;
|
||||
}
|
||||
pushText(fnType, result);
|
||||
let hasBindings = false;
|
||||
if (fnType.bindings.size > 0) {
|
||||
onEachBtwn(
|
||||
fnType.bindings,
|
||||
([key, values]) => {
|
||||
const name = this.assocTypeIdNameMap.get(key);
|
||||
await onEachBtwnAsync(
|
||||
await Promise.all([...fnType.bindings.entries()].map(
|
||||
/**
|
||||
* @param {[number, rustdoc.HighlightedFunctionType[]]} param0
|
||||
* @returns {Promise<[
|
||||
* string|null,
|
||||
* rustdoc.HighlightedFunctionType[],
|
||||
* ]>}
|
||||
*/
|
||||
async([key, values]) => [await this.getName(key), values],
|
||||
)),
|
||||
async([name, values]) => {
|
||||
// @ts-expect-error
|
||||
if (values.length === 1 && values[0].id < 0 &&
|
||||
// @ts-expect-error
|
||||
`${fnType.name}::${name}` === fnParamNames[-1 - values[0].id]) {
|
||||
`${fnType.name}::${name}` === fnParamNames[-1 - values[0].id]
|
||||
) {
|
||||
// the internal `Item=Iterator::Item` type variable should be
|
||||
// shown in the where clause and name mapping output, but is
|
||||
// redundant in this spot
|
||||
for (const value of values) {
|
||||
writeFn(value, []);
|
||||
await writeFn(value, []);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@@ -3169,7 +2438,7 @@ class DocSearch {
|
||||
name: values.length !== 1 ? "=(" : "=",
|
||||
highlighted: false,
|
||||
}, result);
|
||||
onEachBtwn(
|
||||
await onEachBtwnAsync(
|
||||
values || [],
|
||||
value => writeFn(value, result),
|
||||
// @ts-expect-error
|
||||
@@ -3186,7 +2455,7 @@ class DocSearch {
|
||||
if (fnType.generics.length > 0) {
|
||||
pushText({ name: hasBindings ? ", " : "<", highlighted: false }, result);
|
||||
}
|
||||
onEachBtwn(
|
||||
await onEachBtwnAsync(
|
||||
fnType.generics,
|
||||
value => writeFn(value, result),
|
||||
// @ts-expect-error
|
||||
@@ -3199,14 +2468,14 @@ class DocSearch {
|
||||
};
|
||||
/** @type {string[]} */
|
||||
const type = [];
|
||||
onEachBtwn(
|
||||
await onEachBtwnAsync(
|
||||
fnInputs,
|
||||
fnType => writeFn(fnType, type),
|
||||
// @ts-expect-error
|
||||
() => pushText({ name: ", ", highlighted: false }, type),
|
||||
);
|
||||
pushText({ name: " -> ", highlighted: false }, type);
|
||||
onEachBtwn(
|
||||
await onEachBtwnAsync(
|
||||
fnOutput,
|
||||
fnType => writeFn(fnType, type),
|
||||
// @ts-expect-error
|
||||
@@ -3217,177 +2486,253 @@ class DocSearch {
|
||||
};
|
||||
|
||||
/**
|
||||
* This function takes a result map, and sorts it by various criteria, including edit
|
||||
* distance, substring match, and the crate it comes from.
|
||||
* Add extra data to result objects, and filter items that have been
|
||||
* marked for removal.
|
||||
*
|
||||
* @param {rustdoc.Results} results
|
||||
* @param {[rustdoc.PlainResultObject, rustdoc.Row][]} results
|
||||
* @param {"sig"|"elems"|"returned"|null} typeInfo
|
||||
* @param {string} preferredCrate
|
||||
* @returns {Promise<rustdoc.ResultObject[]>}
|
||||
* @param {Set<string>} duplicates
|
||||
* @returns {rustdoc.ResultObject[]}
|
||||
*/
|
||||
const sortResults = async(results, typeInfo, preferredCrate) => {
|
||||
const userQuery = parsedQuery.userQuery;
|
||||
const normalizedUserQuery = parsedQuery.userQuery.toLowerCase();
|
||||
const isMixedCase = normalizedUserQuery !== userQuery;
|
||||
const result_list = [];
|
||||
const isReturnTypeQuery = parsedQuery.elems.length === 0 ||
|
||||
typeInfo === "returned";
|
||||
for (const result of results.values()) {
|
||||
result.item = this.searchIndex[result.id];
|
||||
result.word = this.searchIndex[result.id].word;
|
||||
if (isReturnTypeQuery) {
|
||||
// We are doing a return-type based search, deprioritize "clone-like" results,
|
||||
// ie. functions that also take the queried type as an argument.
|
||||
const resultItemType = result.item && result.item.type;
|
||||
if (!resultItemType) {
|
||||
const transformResults = (results, typeInfo, duplicates) => {
|
||||
const out = [];
|
||||
|
||||
for (const [result, item] of results) {
|
||||
if (item.id !== -1) {
|
||||
const res = buildHrefAndPath(item);
|
||||
// many of these properties don't strictly need to be
|
||||
// copied over, but copying them over satisfies tsc,
|
||||
// and hopefully plays nice with the shape optimization
|
||||
// of the browser engine.
|
||||
/** @type {rustdoc.ResultObject} */
|
||||
const obj = Object.assign({
|
||||
parent: item.parent ? {
|
||||
path: item.parent.path.modulePath,
|
||||
exactPath: item.parent.path.exactModulePath ||
|
||||
item.parent.path.modulePath,
|
||||
name: item.parent.name,
|
||||
ty: item.parent.path.ty,
|
||||
} : undefined,
|
||||
type: item.type && item.type.functionSignature ?
|
||||
item.type.functionSignature :
|
||||
undefined,
|
||||
paramNames: item.type && item.type.paramNames ?
|
||||
item.type.paramNames :
|
||||
undefined,
|
||||
dist: result.dist,
|
||||
path_dist: result.path_dist,
|
||||
index: result.index,
|
||||
desc: this.getDesc(result.id),
|
||||
item,
|
||||
displayPath: pathSplitter(res[0]),
|
||||
fullPath: "",
|
||||
href: "",
|
||||
displayTypeSignature: null,
|
||||
}, result);
|
||||
|
||||
// To be sure than it some items aren't considered as duplicate.
|
||||
obj.fullPath = res[2] + "|" + obj.item.ty;
|
||||
|
||||
if (duplicates.has(obj.fullPath)) {
|
||||
continue;
|
||||
}
|
||||
const inputs = resultItemType.inputs;
|
||||
const where_clause = resultItemType.where_clause;
|
||||
if (containsTypeFromQuery(inputs, where_clause)) {
|
||||
result.path_dist *= 100;
|
||||
result.dist *= 100;
|
||||
|
||||
// Exports are specifically not shown if the items they point at
|
||||
// are already in the results.
|
||||
if (obj.item.ty === TY_IMPORT && duplicates.has(res[2])) {
|
||||
continue;
|
||||
}
|
||||
if (duplicates.has(res[2] + "|" + TY_IMPORT)) {
|
||||
continue;
|
||||
}
|
||||
duplicates.add(obj.fullPath);
|
||||
duplicates.add(res[2]);
|
||||
|
||||
if (typeInfo !== null) {
|
||||
obj.displayTypeSignature = formatDisplayTypeSignature(
|
||||
obj,
|
||||
typeInfo,
|
||||
result.elems,
|
||||
result.returned,
|
||||
);
|
||||
}
|
||||
|
||||
obj.href = res[1];
|
||||
out.push(obj);
|
||||
if (out.length >= MAX_RESULTS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
result_list.push(result);
|
||||
}
|
||||
|
||||
result_list.sort((aaa, bbb) => {
|
||||
/** @type {number} */
|
||||
let a;
|
||||
/** @type {number} */
|
||||
let b;
|
||||
|
||||
// sort by exact case-sensitive match
|
||||
if (isMixedCase) {
|
||||
a = Number(aaa.item.name !== userQuery);
|
||||
b = Number(bbb.item.name !== userQuery);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
}
|
||||
|
||||
// sort by exact match with regard to the last word (mismatch goes later)
|
||||
a = Number(aaa.word !== normalizedUserQuery);
|
||||
b = Number(bbb.word !== normalizedUserQuery);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by index of keyword in item name (no literal occurrence goes later)
|
||||
a = Number(aaa.index < 0);
|
||||
b = Number(bbb.index < 0);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// in type based search, put functions first
|
||||
if (parsedQuery.hasReturnArrow) {
|
||||
a = Number(!isFnLikeTy(aaa.item.ty));
|
||||
b = Number(!isFnLikeTy(bbb.item.ty));
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by distance in the path part, if specified
|
||||
// (less changes required to match means higher rankings)
|
||||
a = Number(aaa.path_dist);
|
||||
b = Number(bbb.path_dist);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// (later literal occurrence, if any, goes later)
|
||||
a = Number(aaa.index);
|
||||
b = Number(bbb.index);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// Sort by distance in the name part, the last part of the path
|
||||
// (less changes required to match means higher rankings)
|
||||
a = Number(aaa.dist);
|
||||
b = Number(bbb.dist);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort deprecated items later
|
||||
a = Number(
|
||||
// @ts-expect-error
|
||||
this.searchIndexDeprecated.get(aaa.item.crate).contains(aaa.item.bitIndex),
|
||||
);
|
||||
b = Number(
|
||||
// @ts-expect-error
|
||||
this.searchIndexDeprecated.get(bbb.item.crate).contains(bbb.item.bitIndex),
|
||||
);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by crate (current crate comes first)
|
||||
a = Number(aaa.item.crate !== preferredCrate);
|
||||
b = Number(bbb.item.crate !== preferredCrate);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by item name length (longer goes later)
|
||||
a = Number(aaa.word.length);
|
||||
b = Number(bbb.word.length);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort doc alias items later
|
||||
a = Number(aaa.item.is_alias === true);
|
||||
b = Number(bbb.item.is_alias === true);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by item name (lexicographically larger goes later)
|
||||
let aw = aaa.word;
|
||||
let bw = bbb.word;
|
||||
if (aw !== bw) {
|
||||
return (aw > bw ? +1 : -1);
|
||||
}
|
||||
|
||||
// sort by description (no description goes later)
|
||||
a = Number(
|
||||
// @ts-expect-error
|
||||
this.searchIndexEmptyDesc.get(aaa.item.crate).contains(aaa.item.bitIndex),
|
||||
);
|
||||
b = Number(
|
||||
// @ts-expect-error
|
||||
this.searchIndexEmptyDesc.get(bbb.item.crate).contains(bbb.item.bitIndex),
|
||||
);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by type (later occurrence in `itemTypes` goes later)
|
||||
a = Number(aaa.item.ty);
|
||||
b = Number(bbb.item.ty);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by path (lexicographically larger goes later)
|
||||
aw = aaa.item.path;
|
||||
bw = bbb.item.path;
|
||||
if (aw !== bw) {
|
||||
return (aw > bw ? +1 : -1);
|
||||
}
|
||||
|
||||
// que sera, sera
|
||||
return 0;
|
||||
});
|
||||
|
||||
return transformResults(result_list, typeInfo);
|
||||
return out;
|
||||
};
|
||||
|
||||
const sortAndTransformResults =
|
||||
/**
|
||||
* @this {DocSearch}
|
||||
* @param {Array<rustdoc.PlainResultObject|null>} results
|
||||
* @param {"sig"|"elems"|"returned"|null} typeInfo
|
||||
* @param {string} preferredCrate
|
||||
* @param {Set<string>} duplicates
|
||||
* @returns {AsyncGenerator<rustdoc.ResultObject, number>}
|
||||
*/
|
||||
async function*(results, typeInfo, preferredCrate, duplicates) {
|
||||
const userQuery = parsedQuery.userQuery;
|
||||
const normalizedUserQuery = parsedQuery.userQuery.toLowerCase();
|
||||
const isMixedCase = normalizedUserQuery !== userQuery;
|
||||
/**
|
||||
* @type {[rustdoc.PlainResultObject, rustdoc.Row][]}
|
||||
*/
|
||||
const result_list = [];
|
||||
for (const result of results.values()) {
|
||||
if (!result) {
|
||||
continue;
|
||||
}
|
||||
/**
|
||||
* @type {rustdoc.Row?}
|
||||
*/
|
||||
const item = await this.getRow(result.id);
|
||||
if (!item) {
|
||||
continue;
|
||||
}
|
||||
if (filterCrates !== null && item.crate !== filterCrates) {
|
||||
continue;
|
||||
}
|
||||
if (item) {
|
||||
result_list.push([result, item]);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
result_list.sort(([aaa, aai], [bbb, bbi]) => {
|
||||
/** @type {number} */
|
||||
let a;
|
||||
/** @type {number} */
|
||||
let b;
|
||||
|
||||
if (typeInfo === null) {
|
||||
// in name based search...
|
||||
|
||||
// sort by exact case-sensitive match
|
||||
if (isMixedCase) {
|
||||
a = Number(aai.name !== userQuery);
|
||||
b = Number(bbi.name !== userQuery);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
}
|
||||
|
||||
// sort by exact match with regard to the last word (mismatch goes later)
|
||||
a = Number(aai.normalizedName !== normalizedUserQuery);
|
||||
b = Number(bbi.normalizedName !== normalizedUserQuery);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by index of keyword in item name (no literal occurrence goes later)
|
||||
a = Number(aaa.index < 0);
|
||||
b = Number(bbb.index < 0);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by distance in the path part, if specified
|
||||
// (less changes required to match means higher rankings)
|
||||
a = Number(aaa.path_dist);
|
||||
b = Number(bbb.path_dist);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// (later literal occurrence, if any, goes later)
|
||||
a = Number(aaa.index);
|
||||
b = Number(bbb.index);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// Sort by distance in the name part, the last part of the path
|
||||
// (less changes required to match means higher rankings)
|
||||
a = Number(aaa.dist);
|
||||
b = Number(bbb.dist);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort aliases lower
|
||||
a = Number(aaa.is_alias);
|
||||
b = Number(bbb.is_alias);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort deprecated items later
|
||||
a = Number(aai.deprecated);
|
||||
b = Number(bbi.deprecated);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by crate (current crate comes first)
|
||||
a = Number(aai.crate !== preferredCrate);
|
||||
b = Number(bbi.crate !== preferredCrate);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by item name length (longer goes later)
|
||||
a = Number(aai.normalizedName.length);
|
||||
b = Number(bbi.normalizedName.length);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by item name (lexicographically larger goes later)
|
||||
let aw = aai.normalizedName;
|
||||
let bw = bbi.normalizedName;
|
||||
if (aw !== bw) {
|
||||
return (aw > bw ? +1 : -1);
|
||||
}
|
||||
|
||||
// sort by description (no description goes later)
|
||||
const di = this.database.getData("desc");
|
||||
if (di) {
|
||||
a = Number(di.isEmpty(aaa.id));
|
||||
b = Number(di.isEmpty(bbb.id));
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
}
|
||||
|
||||
// sort by type (later occurrence in `itemTypes` goes later)
|
||||
a = Number(aai.ty);
|
||||
b = Number(bbi.ty);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by path (lexicographically larger goes later)
|
||||
const ap = aai.modulePath;
|
||||
const bp = bbi.modulePath;
|
||||
aw = ap === undefined ? "" : ap;
|
||||
bw = bp === undefined ? "" : bp;
|
||||
if (aw !== bw) {
|
||||
return (aw > bw ? +1 : -1);
|
||||
}
|
||||
|
||||
// que sera, sera
|
||||
return 0;
|
||||
});
|
||||
|
||||
const transformed_result_list = transformResults(result_list, typeInfo, duplicates);
|
||||
yield* transformed_result_list;
|
||||
return transformed_result_list.length;
|
||||
}
|
||||
.bind(this);
|
||||
|
||||
/**
|
||||
* This function checks if a list of search query `queryElems` can all be found in the
|
||||
* search index (`fnTypes`).
|
||||
@@ -3938,6 +3283,8 @@ class DocSearch {
|
||||
}
|
||||
return true;
|
||||
} else {
|
||||
// For these special cases, matching code need added to the inverted index.
|
||||
// search_index.rs -> convert_render_type does this
|
||||
if (queryElem.id === this.typeNameIdOfArrayOrSlice &&
|
||||
(fnType.id === this.typeNameIdOfSlice || fnType.id === this.typeNameIdOfArray)
|
||||
) {
|
||||
@@ -3948,10 +3295,12 @@ class DocSearch {
|
||||
) {
|
||||
// () matches primitive:tuple or primitive:unit
|
||||
// if it matches, then we're fine, and this is an appropriate match candidate
|
||||
} else if (queryElem.id === this.typeNameIdOfHof &&
|
||||
(fnType.id === this.typeNameIdOfFn || fnType.id === this.typeNameIdOfFnMut ||
|
||||
fnType.id === this.typeNameIdOfFnOnce)
|
||||
) {
|
||||
} else if (queryElem.id === this.typeNameIdOfHof && (
|
||||
fnType.id === this.typeNameIdOfFn ||
|
||||
fnType.id === this.typeNameIdOfFnMut ||
|
||||
fnType.id === this.typeNameIdOfFnOnce ||
|
||||
fnType.id === this.typeNameIdOfFnPtr
|
||||
)) {
|
||||
// -> matches fn, fnonce, and fnmut
|
||||
// if it matches, then we're fine, and this is an appropriate match candidate
|
||||
} else if (fnType.id !== queryElem.id || queryElem.id === null) {
|
||||
@@ -4134,21 +3483,13 @@ class DocSearch {
|
||||
* This function checks if the given list contains any
|
||||
* (non-generic) types mentioned in the query.
|
||||
*
|
||||
* @param {rustdoc.QueryElement[]} elems
|
||||
* @param {rustdoc.FunctionType[]} list - A list of function types.
|
||||
* @param {rustdoc.FunctionType[][]} where_clause - Trait bounds for generic items.
|
||||
*/
|
||||
function containsTypeFromQuery(list, where_clause) {
|
||||
function containsTypeFromQuery(elems, list, where_clause) {
|
||||
if (!list) return false;
|
||||
for (const ty of parsedQuery.returned) {
|
||||
// negative type ids are generics
|
||||
if (ty.id !== null && ty.id < 0) {
|
||||
continue;
|
||||
}
|
||||
if (checkIfInList(list, ty, where_clause, null, 0)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
for (const ty of parsedQuery.elems) {
|
||||
for (const ty of elems) {
|
||||
if (ty.id !== null && ty.id < 0) {
|
||||
continue;
|
||||
}
|
||||
@@ -4240,10 +3581,10 @@ class DocSearch {
|
||||
/**
|
||||
* Compute an "edit distance" that ignores missing path elements.
|
||||
* @param {string[]} contains search query path
|
||||
* @param {rustdoc.Row} ty indexed item
|
||||
* @param {string[]} path indexed page path
|
||||
* @returns {null|number} edit distance
|
||||
*/
|
||||
function checkPath(contains, ty) {
|
||||
function checkPath(contains, path) {
|
||||
if (contains.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
@@ -4251,11 +3592,6 @@ class DocSearch {
|
||||
contains.reduce((acc, next) => acc + next.length, 0) / 3,
|
||||
);
|
||||
let ret_dist = maxPathEditDistance + 1;
|
||||
const path = ty.path.split("::");
|
||||
|
||||
if (ty.parent && ty.parent.name) {
|
||||
path.push(ty.parent.name.toLowerCase());
|
||||
}
|
||||
|
||||
const length = path.length;
|
||||
const clength = contains.length;
|
||||
@@ -4281,7 +3617,32 @@ class DocSearch {
|
||||
return ret_dist > maxPathEditDistance ? null : ret_dist;
|
||||
}
|
||||
|
||||
// @ts-expect-error
|
||||
/**
|
||||
* Compute an "edit distance" that ignores missing path elements.
|
||||
* @param {string[]} contains search query path
|
||||
* @param {rustdoc.Row} row indexed item
|
||||
* @returns {null|number} edit distance
|
||||
*/
|
||||
function checkRowPath(contains, row) {
|
||||
if (contains.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const path = row.modulePath.split("::");
|
||||
|
||||
if (row.parent && row.parent.name) {
|
||||
path.push(row.parent.name.toLowerCase());
|
||||
}
|
||||
|
||||
return checkPath(contains, path);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {number} filter
|
||||
* @param {rustdoc.ItemType} type
|
||||
* @returns
|
||||
*/
|
||||
function typePassesFilter(filter, type) {
|
||||
// No filter or Exact mach
|
||||
if (filter <= NO_TYPE_FILTER || filter === type) return true;
|
||||
@@ -4303,366 +3664,839 @@ class DocSearch {
|
||||
return false;
|
||||
}
|
||||
|
||||
// @ts-expect-error
|
||||
const handleAliases = async(ret, query, filterCrates, currentCrate) => {
|
||||
const lowerQuery = query.toLowerCase();
|
||||
if (this.FOUND_ALIASES.has(lowerQuery)) {
|
||||
return;
|
||||
}
|
||||
this.FOUND_ALIASES.add(lowerQuery);
|
||||
// We separate aliases and crate aliases because we want to have current crate
|
||||
// aliases to be before the others in the displayed results.
|
||||
// @ts-expect-error
|
||||
const aliases = [];
|
||||
// @ts-expect-error
|
||||
const crateAliases = [];
|
||||
if (filterCrates !== null) {
|
||||
if (this.ALIASES.has(filterCrates)
|
||||
&& this.ALIASES.get(filterCrates).has(lowerQuery)) {
|
||||
const query_aliases = this.ALIASES.get(filterCrates).get(lowerQuery);
|
||||
for (const alias of query_aliases) {
|
||||
aliases.push(alias);
|
||||
}
|
||||
const innerRunNameQuery =
|
||||
/**
|
||||
* @this {DocSearch}
|
||||
* @param {string} currentCrate
|
||||
* @returns {AsyncGenerator<rustdoc.ResultObject>}
|
||||
*/
|
||||
async function*(currentCrate) {
|
||||
const index = this.database.getIndex("normalizedName");
|
||||
if (!index) {
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
for (const [crate, crateAliasesIndex] of this.ALIASES) {
|
||||
if (crateAliasesIndex.has(lowerQuery)) {
|
||||
// @ts-expect-error
|
||||
const pushTo = crate === currentCrate ? crateAliases : aliases;
|
||||
const query_aliases = crateAliasesIndex.get(lowerQuery);
|
||||
for (const alias of query_aliases) {
|
||||
pushTo.push(alias);
|
||||
const idDuplicates = new Set();
|
||||
const pathDuplicates = new Set();
|
||||
let count = 0;
|
||||
const prefixResults = [];
|
||||
const normalizedUserQuery = parsedQuery.userQuery
|
||||
.replace(/[_"]/g, "")
|
||||
.toLowerCase();
|
||||
/**
|
||||
* @param {string} name
|
||||
* @param {number} alias
|
||||
* @param {number} dist
|
||||
* @param {number} index
|
||||
* @returns {Promise<rustdoc.PlainResultObject?>}
|
||||
*/
|
||||
const handleAlias = async(name, alias, dist, index) => {
|
||||
return {
|
||||
id: alias,
|
||||
dist,
|
||||
path_dist: 0,
|
||||
index,
|
||||
alias: name,
|
||||
is_alias: true,
|
||||
elems: [], // only used in type-based queries
|
||||
returned: [], // only used in type-based queries
|
||||
original: await this.getRow(alias),
|
||||
};
|
||||
};
|
||||
/**
|
||||
* @param {Promise<rustdoc.PlainResultObject|null>[]} data
|
||||
* @returns {AsyncGenerator<rustdoc.ResultObject, boolean>}
|
||||
*/
|
||||
const flush = async function* (data) {
|
||||
const satr = sortAndTransformResults(
|
||||
await Promise.all(data),
|
||||
null,
|
||||
currentCrate,
|
||||
pathDuplicates,
|
||||
);
|
||||
data.length = 0;
|
||||
for await (const processed of satr) {
|
||||
yield processed;
|
||||
count += 1;
|
||||
if ((count & 0x7F) === 0) {
|
||||
await yieldToBrowser();
|
||||
}
|
||||
if (count >= MAX_RESULTS) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const aliasResults = await index.search(normalizedUserQuery);
|
||||
if (aliasResults) {
|
||||
for (const id of aliasResults.matches().entries()) {
|
||||
const [name, alias] = await Promise.all([
|
||||
this.getName(id),
|
||||
this.getAliasTarget(id),
|
||||
]);
|
||||
if (name !== null &&
|
||||
alias !== null &&
|
||||
!idDuplicates.has(id) &&
|
||||
name.replace(/[_"]/g, "").toLowerCase() === normalizedUserQuery
|
||||
) {
|
||||
prefixResults.push(handleAlias(name, alias, 0, 0));
|
||||
idDuplicates.add(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// @ts-expect-error
|
||||
const sortFunc = (aaa, bbb) => {
|
||||
if (aaa.original.path < bbb.original.path) {
|
||||
return 1;
|
||||
} else if (aaa.original.path === bbb.original.path) {
|
||||
return 0;
|
||||
if (parsedQuery.error !== null || parsedQuery.elems.length === 0) {
|
||||
yield* flush(prefixResults);
|
||||
return;
|
||||
}
|
||||
return -1;
|
||||
};
|
||||
// @ts-expect-error
|
||||
crateAliases.sort(sortFunc);
|
||||
aliases.sort(sortFunc);
|
||||
|
||||
// @ts-expect-error
|
||||
const pushFunc = alias => {
|
||||
// Cloning `alias` to prevent its fields to be updated.
|
||||
alias = {...alias};
|
||||
const res = buildHrefAndPath(alias);
|
||||
alias.displayPath = pathSplitter(res[0]);
|
||||
alias.fullPath = alias.displayPath + alias.name;
|
||||
alias.href = res[1];
|
||||
|
||||
ret.others.unshift(alias);
|
||||
if (ret.others.length > MAX_RESULTS) {
|
||||
ret.others.pop();
|
||||
}
|
||||
};
|
||||
|
||||
aliases.forEach(pushFunc);
|
||||
// @ts-expect-error
|
||||
crateAliases.forEach(pushFunc);
|
||||
};
|
||||
|
||||
/**
|
||||
* This function adds the given result into the provided `results` map if it matches the
|
||||
* following condition:
|
||||
*
|
||||
* * If it is a "literal search" (`parsedQuery.literalSearch`), then `dist` must be 0.
|
||||
* * If it is not a "literal search", `dist` must be <= `maxEditDistance`.
|
||||
*
|
||||
* The `results` map contains information which will be used to sort the search results:
|
||||
*
|
||||
* * `fullId` is an `integer`` used as the key of the object we use for the `results` map.
|
||||
* * `id` is the index in the `searchIndex` array for this element.
|
||||
* * `index` is an `integer`` used to sort by the position of the word in the item's name.
|
||||
* * `dist` is the main metric used to sort the search results.
|
||||
* * `path_dist` is zero if a single-component search query is used, otherwise it's the
|
||||
* distance computed for everything other than the last path component.
|
||||
*
|
||||
* @param {rustdoc.Results} results
|
||||
* @param {number} fullId
|
||||
* @param {number} id
|
||||
* @param {number} index
|
||||
* @param {number} dist
|
||||
* @param {number} path_dist
|
||||
* @param {number} maxEditDistance
|
||||
*/
|
||||
function addIntoResults(results, fullId, id, index, dist, path_dist, maxEditDistance) {
|
||||
if (dist <= maxEditDistance || index !== -1) {
|
||||
if (results.has(fullId)) {
|
||||
const result = results.get(fullId);
|
||||
if (result === undefined || result.dontValidate || result.dist <= dist) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
// @ts-expect-error
|
||||
results.set(fullId, {
|
||||
id: id,
|
||||
index: index,
|
||||
dontValidate: parsedQuery.literalSearch,
|
||||
dist: dist,
|
||||
path_dist: path_dist,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is called in case the query has more than one element. In this case, it'll
|
||||
* try to match the items which validates all the elements. For `aa -> bb` will look for
|
||||
* functions which have a parameter `aa` and has `bb` in its returned values.
|
||||
*
|
||||
* @param {rustdoc.Row} row
|
||||
* @param {number} pos - Position in the `searchIndex`.
|
||||
* @param {rustdoc.Results} results
|
||||
*/
|
||||
function handleArgs(row, pos, results) {
|
||||
if (!row || (filterCrates !== null && row.crate !== filterCrates)) {
|
||||
return;
|
||||
}
|
||||
const rowType = row.type;
|
||||
if (!rowType) {
|
||||
return;
|
||||
}
|
||||
|
||||
const tfpDist = compareTypeFingerprints(
|
||||
row.id,
|
||||
parsedQuery.typeFingerprint,
|
||||
);
|
||||
if (tfpDist === null) {
|
||||
return;
|
||||
}
|
||||
// @ts-expect-error
|
||||
if (results.size >= MAX_RESULTS && tfpDist > results.max_dist) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the result is too "bad", we return false and it ends this search.
|
||||
if (!unifyFunctionTypes(
|
||||
rowType.inputs,
|
||||
parsedQuery.elems,
|
||||
rowType.where_clause,
|
||||
null,
|
||||
// @ts-expect-error
|
||||
mgens => {
|
||||
return unifyFunctionTypes(
|
||||
rowType.output,
|
||||
parsedQuery.returned,
|
||||
rowType.where_clause,
|
||||
mgens,
|
||||
checkTypeMgensForConflict,
|
||||
0, // unboxing depth
|
||||
);
|
||||
},
|
||||
0, // unboxing depth
|
||||
)) {
|
||||
return;
|
||||
}
|
||||
|
||||
results.max_dist = Math.max(results.max_dist || 0, tfpDist);
|
||||
addIntoResults(results, row.id, pos, 0, tfpDist, 0, Number.MAX_VALUE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare the query fingerprint with the function fingerprint.
|
||||
*
|
||||
* @param {number} fullId - The function
|
||||
* @param {Uint32Array} queryFingerprint - The query
|
||||
* @returns {number|null} - Null if non-match, number if distance
|
||||
* This function might return 0!
|
||||
*/
|
||||
const compareTypeFingerprints = (fullId, queryFingerprint) => {
|
||||
const fh0 = this.functionTypeFingerprint[fullId * 4];
|
||||
const fh1 = this.functionTypeFingerprint[(fullId * 4) + 1];
|
||||
const fh2 = this.functionTypeFingerprint[(fullId * 4) + 2];
|
||||
const [qh0, qh1, qh2] = queryFingerprint;
|
||||
// Approximate set intersection with bloom filters.
|
||||
// This can be larger than reality, not smaller, because hashes have
|
||||
// the property that if they've got the same value, they hash to the
|
||||
// same thing. False positives exist, but not false negatives.
|
||||
const [in0, in1, in2] = [fh0 & qh0, fh1 & qh1, fh2 & qh2];
|
||||
// Approximate the set of items in the query but not the function.
|
||||
// This might be smaller than reality, but cannot be bigger.
|
||||
//
|
||||
// | in_ | qh_ | XOR | Meaning |
|
||||
// | --- | --- | --- | ------------------------------------------------ |
|
||||
// | 0 | 0 | 0 | Not present |
|
||||
// | 1 | 0 | 1 | IMPOSSIBLE because `in_` is `fh_ & qh_` |
|
||||
// | 1 | 1 | 0 | If one or both is false positive, false negative |
|
||||
// | 0 | 1 | 1 | Since in_ has no false negatives, must be real |
|
||||
if ((in0 ^ qh0) || (in1 ^ qh1) || (in2 ^ qh2)) {
|
||||
return null;
|
||||
}
|
||||
return this.functionTypeFingerprint[(fullId * 4) + 3];
|
||||
};
|
||||
|
||||
|
||||
const innerRunQuery = () => {
|
||||
if (parsedQuery.foundElems === 1 && !parsedQuery.hasReturnArrow) {
|
||||
const elem = parsedQuery.elems[0];
|
||||
// use arrow functions to preserve `this`.
|
||||
/** @type {function(number): void} */
|
||||
const handleNameSearch = id => {
|
||||
const row = this.searchIndex[id];
|
||||
if (!typePassesFilter(elem.typeFilter, row.ty) ||
|
||||
const typeFilter = itemTypeFromName(elem.typeFilter);
|
||||
/**
|
||||
* @param {number} id
|
||||
* @returns {Promise<rustdoc.PlainResultObject?>}
|
||||
*/
|
||||
const handleNameSearch = async id => {
|
||||
const row = await this.getRow(id);
|
||||
if (!row || !row.entry) {
|
||||
return null;
|
||||
}
|
||||
if (!typePassesFilter(typeFilter, row.ty) ||
|
||||
(filterCrates !== null && row.crate !== filterCrates)) {
|
||||
return;
|
||||
return null;
|
||||
}
|
||||
|
||||
/** @type {number|null} */
|
||||
let pathDist = 0;
|
||||
if (elem.fullPath.length > 1) {
|
||||
|
||||
const maybePathDist = checkPath(elem.pathWithoutLast, row);
|
||||
if (maybePathDist === null) {
|
||||
return;
|
||||
pathDist = checkRowPath(elem.pathWithoutLast, row);
|
||||
if (pathDist === null) {
|
||||
return null;
|
||||
}
|
||||
pathDist = maybePathDist;
|
||||
}
|
||||
|
||||
if (parsedQuery.literalSearch) {
|
||||
if (row.word === elem.pathLast) {
|
||||
addIntoResults(results_others, row.id, id, 0, 0, pathDist, 0);
|
||||
}
|
||||
} else {
|
||||
addIntoResults(
|
||||
results_others,
|
||||
row.id,
|
||||
return row.name.toLowerCase() === elem.pathLast ? {
|
||||
id,
|
||||
row.normalizedName.indexOf(elem.normalizedPathLast),
|
||||
editDistance(
|
||||
dist: 0,
|
||||
path_dist: 0,
|
||||
index: 0,
|
||||
elems: [], // only used in type-based queries
|
||||
returned: [], // only used in type-based queries
|
||||
is_alias: false,
|
||||
} : null;
|
||||
} else {
|
||||
return {
|
||||
id,
|
||||
dist: editDistance(
|
||||
row.normalizedName,
|
||||
elem.normalizedPathLast,
|
||||
maxEditDistance,
|
||||
),
|
||||
pathDist,
|
||||
maxEditDistance,
|
||||
path_dist: pathDist,
|
||||
index: row.normalizedName.indexOf(elem.normalizedPathLast),
|
||||
elems: [], // only used in type-based queries
|
||||
returned: [], // only used in type-based queries
|
||||
is_alias: false,
|
||||
};
|
||||
}
|
||||
};
|
||||
if (elem.normalizedPathLast === "") {
|
||||
// faster full-table scan for this specific case.
|
||||
const nameData = this.database.getData("name");
|
||||
const l = nameData ? nameData.length : 0;
|
||||
for (let id = 0; id < l; ++id) {
|
||||
if (!idDuplicates.has(id)) {
|
||||
idDuplicates.add(id);
|
||||
prefixResults.push(handleNameSearch(id));
|
||||
}
|
||||
if (yield* flush(prefixResults)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
const results = await index.search(elem.normalizedPathLast);
|
||||
if (results) {
|
||||
for await (const result of results.prefixMatches()) {
|
||||
for (const id of result.entries()) {
|
||||
if (!idDuplicates.has(id)) {
|
||||
idDuplicates.add(id);
|
||||
prefixResults.push(handleNameSearch(id));
|
||||
const [name, alias] = await Promise.all([
|
||||
this.getName(id),
|
||||
this.getAliasTarget(id),
|
||||
]);
|
||||
if (name !== null && alias !== null) {
|
||||
prefixResults.push(handleAlias(name, alias, 0, 0));
|
||||
}
|
||||
}
|
||||
}
|
||||
if (yield* flush(prefixResults)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (yield* flush(prefixResults)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
const levSearchResults = index.searchLev(elem.normalizedPathLast);
|
||||
const levResults = [];
|
||||
for await (const levResult of levSearchResults) {
|
||||
for (const id of levResult.matches().entries()) {
|
||||
if (!idDuplicates.has(id)) {
|
||||
idDuplicates.add(id);
|
||||
levResults.push(handleNameSearch(id));
|
||||
const [name, alias] = await Promise.all([
|
||||
this.getName(id),
|
||||
this.getAliasTarget(id),
|
||||
]);
|
||||
if (name !== null && alias !== null) {
|
||||
levResults.push(handleAlias(
|
||||
name,
|
||||
alias,
|
||||
editDistance(elem.normalizedPathLast, name, maxEditDistance),
|
||||
name.indexOf(elem.normalizedPathLast),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
yield* flush(levResults);
|
||||
if (results) {
|
||||
const substringResults = [];
|
||||
for await (const result of results.substringMatches()) {
|
||||
for (const id of result.entries()) {
|
||||
if (!idDuplicates.has(id)) {
|
||||
idDuplicates.add(id);
|
||||
substringResults.push(handleNameSearch(id));
|
||||
const [name, alias] = await Promise.all([
|
||||
this.getName(id),
|
||||
this.getAliasTarget(id),
|
||||
]);
|
||||
if (name !== null && alias !== null) {
|
||||
levResults.push(handleAlias(
|
||||
name,
|
||||
alias,
|
||||
editDistance(
|
||||
elem.normalizedPathLast,
|
||||
name,
|
||||
maxEditDistance,
|
||||
),
|
||||
name.indexOf(elem.normalizedPathLast),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
if (yield* flush(substringResults)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.bind(this);
|
||||
|
||||
const innerRunTypeQuery =
|
||||
/**
|
||||
* @this {DocSearch}
|
||||
* @param {rustdoc.ParserQueryElement[]} inputs
|
||||
* @param {rustdoc.ParserQueryElement[]} output
|
||||
* @param {"sig"|"elems"|"returned"|null} typeInfo
|
||||
* @param {string} currentCrate
|
||||
* @returns {AsyncGenerator<rustdoc.ResultObject>}
|
||||
*/
|
||||
async function*(inputs, output, typeInfo, currentCrate) {
|
||||
const index = this.database.getIndex("normalizedName");
|
||||
if (!index) {
|
||||
return;
|
||||
}
|
||||
/** @type {Map<string, number>} */
|
||||
const genericMap = new Map();
|
||||
/**
|
||||
* @template Q
|
||||
* @typedef {{
|
||||
* invertedIndex: stringdex.RoaringBitmap[],
|
||||
* queryElem: Q,
|
||||
* }} PostingsList
|
||||
*/
|
||||
/** @type {stringdex.RoaringBitmap[]} */
|
||||
const empty_inverted_index = [];
|
||||
/** @type {PostingsList<any>[]} */
|
||||
const empty_postings_list = [];
|
||||
/** @type {stringdex.RoaringBitmap[]} */
|
||||
const everything_inverted_index = [];
|
||||
for (let i = 0; i < 64; ++i) {
|
||||
everything_inverted_index.push(RoaringBitmap.everything());
|
||||
}
|
||||
/**
|
||||
* @type {PostingsList<rustdoc.QueryElement[]>}
|
||||
*/
|
||||
const everything_postings_list = {
|
||||
invertedIndex: everything_inverted_index,
|
||||
queryElem: [],
|
||||
};
|
||||
/**
|
||||
* @type {PostingsList<rustdoc.QueryElement[]>[]}
|
||||
*/
|
||||
const nested_everything_postings_list = [everything_postings_list];
|
||||
/**
|
||||
* @param {...stringdex.RoaringBitmap[]} idx
|
||||
* @returns {stringdex.RoaringBitmap[]}
|
||||
*/
|
||||
const intersectInvertedIndexes = (...idx) => {
|
||||
let i = 0;
|
||||
const l = idx.length;
|
||||
while (i < l - 1 && idx[i] === everything_inverted_index) {
|
||||
i += 1;
|
||||
}
|
||||
const result = [...idx[i]];
|
||||
for (; i < l; ++i) {
|
||||
if (idx[i] === everything_inverted_index) {
|
||||
continue;
|
||||
}
|
||||
if (idx[i].length < result.length) {
|
||||
result.length = idx[i].length;
|
||||
}
|
||||
for (let j = 0; j < result.length; ++j) {
|
||||
result[j] = result[j].intersection(idx[i][j]);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
/**
|
||||
* Fetch a bitmap of potentially-matching functions,
|
||||
* plus a list of query elements annotated with the correct IDs.
|
||||
*
|
||||
* More than one ID can exist because, for example, q=`Iter` can match
|
||||
* `std::vec::Iter`, or `std::btree_set::Iter`, or anything else, and those
|
||||
* items different IDs. What's worse, q=`Iter<Iter>` has N**2 possible
|
||||
* matches, because it could be `vec::Iter<btree_set::Iter>`,
|
||||
* `btree_set::Iter<vec::Iter>`, `vec::Iter<vec::Iter>`,
|
||||
* `btree_set::Iter<btree_set::Iter>`,
|
||||
* or anything else. This function returns all possible permutations.
|
||||
*
|
||||
* @param {rustdoc.ParserQueryElement|null} elem
|
||||
* @returns {Promise<PostingsList<rustdoc.QueryElement>[]>}
|
||||
*/
|
||||
const unpackPostingsList = async elem => {
|
||||
if (!elem) {
|
||||
return empty_postings_list;
|
||||
}
|
||||
const typeFilter = itemTypeFromName(elem.typeFilter);
|
||||
const searchResults = await index.search(elem.normalizedPathLast);
|
||||
/**
|
||||
* @type {Promise<[
|
||||
* number,
|
||||
* string|null,
|
||||
* rustdoc.TypeData|null,
|
||||
* rustdoc.PathData|null,
|
||||
* ]>[]}
|
||||
* */
|
||||
const typePromises = [];
|
||||
if (typeFilter !== TY_GENERIC && searchResults) {
|
||||
for (const id of searchResults.matches().entries()) {
|
||||
typePromises.push(Promise.all([
|
||||
this.getName(id),
|
||||
this.getTypeData(id),
|
||||
this.getPathData(id),
|
||||
]).then(([name, typeData, pathData]) =>
|
||||
[id, name, typeData, pathData]));
|
||||
}
|
||||
}
|
||||
const types = (await Promise.all(typePromises))
|
||||
.filter(([_id, name, ty, path]) =>
|
||||
name !== null && name.toLowerCase() === elem.pathLast &&
|
||||
ty && !ty.invertedFunctionSignatureIndex.every(bitmap => {
|
||||
return bitmap.isEmpty();
|
||||
}) &&
|
||||
path && path.ty !== TY_ASSOCTYPE &&
|
||||
(elem.pathWithoutLast.length === 0 ||
|
||||
checkPath(
|
||||
elem.pathWithoutLast,
|
||||
path.modulePath.split("::"),
|
||||
) === 0),
|
||||
);
|
||||
if (types.length === 0) {
|
||||
const areGenericsAllowed = typeFilter === TY_GENERIC || (
|
||||
typeFilter === -1 &&
|
||||
(parsedQuery.totalElems > 1 || parsedQuery.hasReturnArrow) &&
|
||||
elem.pathWithoutLast.length === 0 &&
|
||||
elem.generics.length === 0 &&
|
||||
elem.bindings.size === 0
|
||||
);
|
||||
}
|
||||
};
|
||||
if (elem.normalizedPathLast !== "") {
|
||||
const last = elem.normalizedPathLast;
|
||||
for (const id of this.nameTrie.search(last, this.tailTable)) {
|
||||
handleNameSearch(id);
|
||||
}
|
||||
}
|
||||
const length = this.searchIndex.length;
|
||||
|
||||
for (let i = 0, nSearchIndex = length; i < nSearchIndex; ++i) {
|
||||
// queries that end in :: bypass the trie
|
||||
if (elem.normalizedPathLast === "") {
|
||||
handleNameSearch(i);
|
||||
}
|
||||
const row = this.searchIndex[i];
|
||||
if (filterCrates !== null && row.crate !== filterCrates) {
|
||||
continue;
|
||||
}
|
||||
const tfpDist = compareTypeFingerprints(
|
||||
row.id,
|
||||
parsedQuery.typeFingerprint,
|
||||
);
|
||||
if (tfpDist !== null) {
|
||||
const in_args = row.type && row.type.inputs
|
||||
&& checkIfInList(row.type.inputs, elem, row.type.where_clause, null, 0);
|
||||
const returned = row.type && row.type.output
|
||||
&& checkIfInList(row.type.output, elem, row.type.where_clause, null, 0);
|
||||
if (in_args) {
|
||||
results_in_args.max_dist = Math.max(
|
||||
results_in_args.max_dist || 0,
|
||||
tfpDist,
|
||||
);
|
||||
const maxDist = results_in_args.size < MAX_RESULTS ?
|
||||
(tfpDist + 1) :
|
||||
results_in_args.max_dist;
|
||||
addIntoResults(results_in_args, row.id, i, -1, tfpDist, 0, maxDist);
|
||||
if (typeFilter !== TY_GENERIC &&
|
||||
(elem.name.length >= 3 || !areGenericsAllowed)
|
||||
) {
|
||||
/** @type {string|null} */
|
||||
let chosenName = null;
|
||||
/** @type {rustdoc.TypeData[]} */
|
||||
let chosenType = [];
|
||||
/** @type {rustdoc.PathData[]} */
|
||||
let chosenPath = [];
|
||||
/** @type {number[]} */
|
||||
let chosenId = [];
|
||||
let chosenDist = Number.MAX_SAFE_INTEGER;
|
||||
const levResults = index.searchLev(elem.normalizedPathLast);
|
||||
for await (const searchResults of levResults) {
|
||||
for (const id of searchResults.matches().entries()) {
|
||||
const [name, ty, path] = await Promise.all([
|
||||
this.getName(id),
|
||||
this.getTypeData(id),
|
||||
this.getPathData(id),
|
||||
]);
|
||||
if (name !== null && ty !== null && path !== null &&
|
||||
!ty.invertedFunctionSignatureIndex.every(bitmap => {
|
||||
return bitmap.isEmpty();
|
||||
}) &&
|
||||
path.ty !== TY_ASSOCTYPE
|
||||
) {
|
||||
let dist = editDistance(
|
||||
name,
|
||||
elem.pathLast,
|
||||
maxEditDistance,
|
||||
);
|
||||
if (elem.pathWithoutLast.length !== 0) {
|
||||
const pathDist = checkPath(
|
||||
elem.pathWithoutLast,
|
||||
path.modulePath.split("::"),
|
||||
);
|
||||
// guaranteed to be higher than the path limit
|
||||
dist += pathDist === null ?
|
||||
Number.MAX_SAFE_INTEGER :
|
||||
pathDist;
|
||||
}
|
||||
if (name === chosenName) {
|
||||
chosenId.push(id);
|
||||
chosenType.push(ty);
|
||||
chosenPath.push(path);
|
||||
} else if (dist < chosenDist) {
|
||||
chosenName = name;
|
||||
chosenId = [id];
|
||||
chosenType = [ty];
|
||||
chosenPath = [path];
|
||||
chosenDist = dist;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (chosenId.length !== 0) {
|
||||
// searchLev returns results in order
|
||||
// if we have working matches, we're done
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (areGenericsAllowed) {
|
||||
parsedQuery.proposeCorrectionFrom = elem.name;
|
||||
parsedQuery.proposeCorrectionTo = chosenName;
|
||||
} else {
|
||||
parsedQuery.correction = chosenName;
|
||||
for (let i = 0; i < chosenType.length; ++i) {
|
||||
types.push([
|
||||
chosenId[i],
|
||||
chosenName,
|
||||
chosenType[i],
|
||||
chosenPath[i],
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (returned) {
|
||||
results_returned.max_dist = Math.max(
|
||||
results_returned.max_dist || 0,
|
||||
tfpDist,
|
||||
);
|
||||
const maxDist = results_returned.size < MAX_RESULTS ?
|
||||
(tfpDist + 1) :
|
||||
results_returned.max_dist;
|
||||
addIntoResults(results_returned, row.id, i, -1, tfpDist, 0, maxDist);
|
||||
if (areGenericsAllowed) {
|
||||
let genericId = genericMap.get(elem.normalizedPathLast);
|
||||
if (genericId === undefined) {
|
||||
genericId = genericMap.size;
|
||||
genericMap.set(elem.normalizedPathLast, genericId);
|
||||
}
|
||||
return [{
|
||||
invertedIndex: await this.getGenericInvertedIndex(genericId),
|
||||
queryElem: {
|
||||
name: elem.name,
|
||||
id: (-genericId) - 1,
|
||||
typeFilter: TY_GENERIC,
|
||||
generics: [],
|
||||
bindings: EMPTY_BINDINGS_MAP,
|
||||
fullPath: elem.fullPath,
|
||||
pathLast: elem.pathLast,
|
||||
normalizedPathLast: elem.normalizedPathLast,
|
||||
pathWithoutLast: elem.pathWithoutLast,
|
||||
},
|
||||
}];
|
||||
}
|
||||
}
|
||||
types.sort(([_i, name1, _t, pathData1], [_i2, name2, _t2, pathData2]) => {
|
||||
const p1 = !pathData1 ? "" : pathData1.modulePath;
|
||||
const p2 = !pathData2 ? "" : pathData2.modulePath;
|
||||
const n1 = name1 === null ? "" : name1;
|
||||
const n2 = name2 === null ? "" : name2;
|
||||
if (p1.length !== p2.length) {
|
||||
return p1.length > p2.length ? +1 : -1;
|
||||
}
|
||||
if (n1.length !== n2.length) {
|
||||
return n1.length > n2.length ? +1 : -1;
|
||||
}
|
||||
if (n1 !== n2) {
|
||||
return n1 > n2 ? +1 : -1;
|
||||
}
|
||||
if (p1 !== p2) {
|
||||
return p1 > p2 ? +1 : -1;
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
/** @type {PostingsList<rustdoc.QueryElement>[]} */
|
||||
const results = [];
|
||||
for (const [id, _name, typeData] of types) {
|
||||
if (!typeData || typeData.invertedFunctionSignatureIndex.every(bitmap => {
|
||||
return bitmap.isEmpty();
|
||||
})) {
|
||||
continue;
|
||||
}
|
||||
const upla = await unpackPostingsListAll(elem.generics);
|
||||
const uplb = await unpackPostingsListBindings(elem.bindings);
|
||||
for (const {invertedIndex: genericsIdx, queryElem: generics} of upla) {
|
||||
for (const {invertedIndex: bindingsIdx, queryElem: bindings} of uplb) {
|
||||
results.push({
|
||||
invertedIndex: intersectInvertedIndexes(
|
||||
typeData.invertedFunctionSignatureIndex,
|
||||
genericsIdx,
|
||||
bindingsIdx,
|
||||
),
|
||||
queryElem: {
|
||||
name: elem.name,
|
||||
id,
|
||||
typeFilter,
|
||||
generics,
|
||||
bindings,
|
||||
fullPath: elem.fullPath,
|
||||
pathLast: elem.pathLast,
|
||||
normalizedPathLast: elem.normalizedPathLast,
|
||||
pathWithoutLast: elem.pathWithoutLast,
|
||||
},
|
||||
});
|
||||
if ((results.length & 0x7F) === 0) {
|
||||
await yieldToBrowser();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return results;
|
||||
};
|
||||
/**
|
||||
* Fetch all possible matching permutations of a list of query elements.
|
||||
*
|
||||
* The empty list returns an "identity postings list", with a bitmap that
|
||||
* matches everything and an empty list of elems. This allows you to safely
|
||||
* take the intersection of this bitmap.
|
||||
*
|
||||
* @param {(rustdoc.ParserQueryElement|null)[]|null} elems
|
||||
* @returns {Promise<PostingsList<rustdoc.QueryElement[]>[]>}
|
||||
*/
|
||||
const unpackPostingsListAll = async elems => {
|
||||
if (!elems || elems.length === 0) {
|
||||
return nested_everything_postings_list;
|
||||
}
|
||||
const [firstPostingsList, remainingAll] = await Promise.all([
|
||||
unpackPostingsList(elems[0]),
|
||||
unpackPostingsListAll(elems.slice(1)),
|
||||
]);
|
||||
/** @type {PostingsList<rustdoc.QueryElement[]>[]} */
|
||||
const results = [];
|
||||
for (const {
|
||||
invertedIndex: firstIdx,
|
||||
queryElem: firstElem,
|
||||
} of firstPostingsList) {
|
||||
for (const {
|
||||
invertedIndex: remainingIdx,
|
||||
queryElem: remainingElems,
|
||||
} of remainingAll) {
|
||||
results.push({
|
||||
invertedIndex: intersectInvertedIndexes(firstIdx, remainingIdx),
|
||||
queryElem: [firstElem, ...remainingElems],
|
||||
});
|
||||
if ((results.length & 0x7F) === 0) {
|
||||
await yieldToBrowser();
|
||||
}
|
||||
}
|
||||
}
|
||||
return results;
|
||||
};
|
||||
/**
|
||||
* Fetch all possible matching permutations of a map query element bindings.
|
||||
*
|
||||
* The empty list returns an "identity postings list", with a bitmap that
|
||||
* matches everything and an empty list of elems. This allows you to safely
|
||||
* take the intersection of this bitmap.
|
||||
*
|
||||
* Heads up! This function mutates the Map that you provide.
|
||||
* Before passing an actual parser item to it, make sure to clone the map.
|
||||
*
|
||||
* @param {Map<string, rustdoc.ParserQueryElement[]>} elems
|
||||
* @returns {Promise<PostingsList<
|
||||
* Map<number, rustdoc.QueryElement[]>,
|
||||
* >[]>}
|
||||
*/
|
||||
const unpackPostingsListBindings = async elems => {
|
||||
if (!elems) {
|
||||
return [{
|
||||
invertedIndex: everything_inverted_index,
|
||||
queryElem: new Map(),
|
||||
}];
|
||||
}
|
||||
const firstKey = elems.keys().next().value;
|
||||
if (firstKey === undefined) {
|
||||
return [{
|
||||
invertedIndex: everything_inverted_index,
|
||||
queryElem: new Map(),
|
||||
}];
|
||||
}
|
||||
const firstList = elems.get(firstKey);
|
||||
if (firstList === undefined) {
|
||||
return [{
|
||||
invertedIndex: everything_inverted_index,
|
||||
queryElem: new Map(),
|
||||
}];
|
||||
}
|
||||
const firstKeyIds = await index.search(firstKey);
|
||||
if (!firstKeyIds) {
|
||||
// User specified a non-existent key.
|
||||
return [{
|
||||
invertedIndex: empty_inverted_index,
|
||||
queryElem: new Map(),
|
||||
}];
|
||||
}
|
||||
elems.delete(firstKey);
|
||||
const [firstPostingsList, remainingAll] = await Promise.all([
|
||||
unpackPostingsListAll(firstList),
|
||||
unpackPostingsListBindings(elems),
|
||||
]);
|
||||
/** @type {PostingsList<Map<number, rustdoc.QueryElement[]>>[]} */
|
||||
const results = [];
|
||||
for (const keyId of firstKeyIds.matches().entries()) {
|
||||
for (const {
|
||||
invertedIndex: firstIdx,
|
||||
queryElem: firstElem,
|
||||
} of firstPostingsList) {
|
||||
for (const {
|
||||
invertedIndex: remainingIdx,
|
||||
queryElem: remainingElems,
|
||||
} of remainingAll) {
|
||||
const elems = new Map(remainingElems);
|
||||
elems.set(keyId, firstElem);
|
||||
results.push({
|
||||
invertedIndex: intersectInvertedIndexes(firstIdx, remainingIdx),
|
||||
queryElem: elems,
|
||||
});
|
||||
if ((results.length & 0x7F) === 0) {
|
||||
await yieldToBrowser();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
elems.set(firstKey, firstList);
|
||||
if (results.length === 0) {
|
||||
// User specified a non-existent key.
|
||||
return [{
|
||||
invertedIndex: empty_inverted_index,
|
||||
queryElem: new Map(),
|
||||
}];
|
||||
}
|
||||
return results;
|
||||
};
|
||||
|
||||
// finally, we can do the actual unification loop
|
||||
const [allInputs, allOutput] = await Promise.all([
|
||||
unpackPostingsListAll(inputs),
|
||||
unpackPostingsListAll(output),
|
||||
]);
|
||||
let checkCounter = 0;
|
||||
/**
|
||||
* Finally, we can perform an incremental search, sorted by the number of
|
||||
* entries that match a given query.
|
||||
*
|
||||
* The outer list gives the number of elements. The inner one is separate
|
||||
* for each distinct name resolution.
|
||||
*
|
||||
* @type {{
|
||||
* bitmap: stringdex.RoaringBitmap,
|
||||
* inputs: rustdoc.QueryElement[],
|
||||
* output: rustdoc.QueryElement[],
|
||||
* }[][]}
|
||||
*/
|
||||
const queryPlan = [];
|
||||
for (const {invertedIndex: inputsIdx, queryElem: inputs} of allInputs) {
|
||||
for (const {invertedIndex: outputIdx, queryElem: output} of allOutput) {
|
||||
const invertedIndex = intersectInvertedIndexes(inputsIdx, outputIdx);
|
||||
for (const [size, bitmap] of invertedIndex.entries()) {
|
||||
checkCounter += 1;
|
||||
if ((checkCounter & 0x7F) === 0) {
|
||||
await yieldToBrowser();
|
||||
}
|
||||
if (!queryPlan[size]) {
|
||||
queryPlan[size] = [];
|
||||
}
|
||||
queryPlan[size].push({
|
||||
bitmap, inputs, output,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (parsedQuery.foundElems > 0) {
|
||||
// Sort input and output so that generic type variables go first and
|
||||
// types with generic parameters go last.
|
||||
// That's because of the way unification is structured: it eats off
|
||||
// the end, and hits a fast path if the last item is a simple atom.
|
||||
/** @type {function(rustdoc.QueryElement, rustdoc.QueryElement): number} */
|
||||
const sortQ = (a, b) => {
|
||||
const ag = a.generics.length === 0 && a.bindings.size === 0;
|
||||
const bg = b.generics.length === 0 && b.bindings.size === 0;
|
||||
if (ag !== bg) {
|
||||
// unary `+` converts booleans into integers.
|
||||
return +ag - +bg;
|
||||
const resultPromises = [];
|
||||
const dedup = new Set();
|
||||
let resultCounter = 0;
|
||||
const isReturnTypeQuery = inputs.length === 0;
|
||||
/** @type {rustdoc.PlainResultObject[]} */
|
||||
const pushToBottom = [];
|
||||
plan: for (const queryStep of queryPlan) {
|
||||
for (const {bitmap, inputs, output} of queryStep) {
|
||||
for (const id of bitmap.entries()) {
|
||||
checkCounter += 1;
|
||||
if ((checkCounter & 0x7F) === 0) {
|
||||
await yieldToBrowser();
|
||||
}
|
||||
resultPromises.push(this.getFunctionData(id).then(async fnData => {
|
||||
if (!fnData || !fnData.functionSignature) {
|
||||
return null;
|
||||
}
|
||||
checkCounter += 1;
|
||||
if ((checkCounter & 0x7F) === 0) {
|
||||
await yieldToBrowser();
|
||||
}
|
||||
const functionSignature = fnData.functionSignature;
|
||||
if (!unifyFunctionTypes(
|
||||
functionSignature.inputs,
|
||||
inputs,
|
||||
functionSignature.where_clause,
|
||||
null,
|
||||
mgens => {
|
||||
return !!unifyFunctionTypes(
|
||||
functionSignature.output,
|
||||
output,
|
||||
functionSignature.where_clause,
|
||||
mgens,
|
||||
checkTypeMgensForConflict,
|
||||
0, // unboxing depth
|
||||
);
|
||||
},
|
||||
0, // unboxing depth
|
||||
)) {
|
||||
return null;
|
||||
}
|
||||
const result = {
|
||||
id,
|
||||
dist: fnData.elemCount,
|
||||
path_dist: 0,
|
||||
index: -1,
|
||||
elems: inputs,
|
||||
returned: output,
|
||||
is_alias: false,
|
||||
};
|
||||
const entry = await this.getEntryData(id);
|
||||
if ((entry && !isFnLikeTy(entry.ty)) ||
|
||||
(isReturnTypeQuery &&
|
||||
functionSignature &&
|
||||
containsTypeFromQuery(
|
||||
output,
|
||||
functionSignature.inputs,
|
||||
functionSignature.where_clause,
|
||||
)
|
||||
)
|
||||
) {
|
||||
pushToBottom.push(result);
|
||||
return null;
|
||||
}
|
||||
return result;
|
||||
}));
|
||||
}
|
||||
}
|
||||
const ai = a.id !== null && a.id > 0;
|
||||
const bi = b.id !== null && b.id > 0;
|
||||
return +ai - +bi;
|
||||
};
|
||||
parsedQuery.elems.sort(sortQ);
|
||||
parsedQuery.returned.sort(sortQ);
|
||||
for (let i = 0, nSearchIndex = this.searchIndex.length; i < nSearchIndex; ++i) {
|
||||
handleArgs(this.searchIndex[i], i, results_others);
|
||||
for await (const result of sortAndTransformResults(
|
||||
await Promise.all(resultPromises),
|
||||
typeInfo,
|
||||
currentCrate,
|
||||
dedup,
|
||||
)) {
|
||||
if (resultCounter >= MAX_RESULTS) {
|
||||
break plan;
|
||||
}
|
||||
yield result;
|
||||
resultCounter += 1;
|
||||
}
|
||||
resultPromises.length = 0;
|
||||
}
|
||||
if (resultCounter >= MAX_RESULTS) {
|
||||
return;
|
||||
}
|
||||
for await (const result of sortAndTransformResults(
|
||||
await Promise.all(pushToBottom),
|
||||
typeInfo,
|
||||
currentCrate,
|
||||
dedup,
|
||||
)) {
|
||||
if (resultCounter >= MAX_RESULTS) {
|
||||
break;
|
||||
}
|
||||
yield result;
|
||||
resultCounter += 1;
|
||||
}
|
||||
}
|
||||
};
|
||||
.bind(this);
|
||||
|
||||
if (parsedQuery.error === null) {
|
||||
innerRunQuery();
|
||||
if (parsedQuery.foundElems === 1 && !parsedQuery.hasReturnArrow) {
|
||||
// We never want the main tab to delay behind the other two tabs.
|
||||
// This is a bit of a hack (because JS's scheduler doesn't have much of an API),
|
||||
// along with making innerRunTypeQuery yield to the UI thread.
|
||||
const {
|
||||
promise: donePromise,
|
||||
resolve: doneResolve,
|
||||
reject: doneReject,
|
||||
} = Promise.withResolvers();
|
||||
const doneTimeout = timeout(250);
|
||||
return {
|
||||
"in_args": (async function*() {
|
||||
await Promise.race([donePromise, doneTimeout]);
|
||||
yield* innerRunTypeQuery(parsedQuery.elems, [], "elems", currentCrate);
|
||||
})(),
|
||||
"returned": (async function*() {
|
||||
await Promise.race([donePromise, doneTimeout]);
|
||||
yield* innerRunTypeQuery([], parsedQuery.elems, "returned", currentCrate);
|
||||
})(),
|
||||
"others": (async function*() {
|
||||
try {
|
||||
yield* innerRunNameQuery(currentCrate);
|
||||
doneResolve(null);
|
||||
} catch (e) {
|
||||
doneReject(e);
|
||||
throw e;
|
||||
}
|
||||
})(),
|
||||
"query": parsedQuery,
|
||||
};
|
||||
} else if (parsedQuery.error !== null) {
|
||||
return {
|
||||
"in_args": (async function*() {})(),
|
||||
"returned": (async function*() {})(),
|
||||
"others": innerRunNameQuery(currentCrate),
|
||||
"query": parsedQuery,
|
||||
};
|
||||
} else {
|
||||
const typeInfo = parsedQuery.elems.length === 0 ?
|
||||
"returned" : (
|
||||
parsedQuery.returned.length === 0 ? "elems" : "sig"
|
||||
);
|
||||
return {
|
||||
"in_args": (async function*() {})(),
|
||||
"returned": (async function*() {})(),
|
||||
"others": parsedQuery.foundElems === 0 ?
|
||||
(async function*() {})() :
|
||||
innerRunTypeQuery(
|
||||
parsedQuery.elems,
|
||||
parsedQuery.returned,
|
||||
typeInfo,
|
||||
currentCrate,
|
||||
),
|
||||
"query": parsedQuery,
|
||||
};
|
||||
}
|
||||
|
||||
const isType = parsedQuery.foundElems !== 1 || parsedQuery.hasReturnArrow;
|
||||
const [sorted_in_args, sorted_returned, sorted_others] = await Promise.all([
|
||||
sortResults(results_in_args, "elems", currentCrate),
|
||||
sortResults(results_returned, "returned", currentCrate),
|
||||
// @ts-expect-error
|
||||
sortResults(results_others, (isType ? "query" : null), currentCrate),
|
||||
]);
|
||||
const ret = createQueryResults(
|
||||
sorted_in_args,
|
||||
sorted_returned,
|
||||
sorted_others,
|
||||
parsedQuery);
|
||||
await handleAliases(ret, parsedQuery.userQuery.replace(/"/g, ""),
|
||||
filterCrates, currentCrate);
|
||||
await Promise.all([ret.others, ret.returned, ret.in_args].map(async list => {
|
||||
const descs = await Promise.all(list.map(result => {
|
||||
// @ts-expect-error
|
||||
return this.searchIndexEmptyDesc.get(result.crate).contains(result.bitIndex) ?
|
||||
"" :
|
||||
// @ts-expect-error
|
||||
this.searchState.loadDesc(result);
|
||||
}));
|
||||
for (const [i, result] of list.entries()) {
|
||||
// @ts-expect-error
|
||||
result.desc = descs[i];
|
||||
}
|
||||
}));
|
||||
if (parsedQuery.error !== null && ret.others.length !== 0) {
|
||||
// It means some doc aliases were found so let's "remove" the error!
|
||||
ret.query.error = null;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ==================== Core search logic end ====================
|
||||
|
||||
/** @type {Map<string, rustdoc.RawSearchIndexCrate>} */
|
||||
let rawSearchIndex;
|
||||
// @ts-expect-error
|
||||
/** @type {DocSearch} */
|
||||
let docSearch;
|
||||
const longItemTypes = [
|
||||
"keyword",
|
||||
@@ -4762,12 +4596,8 @@ function buildUrl(search, filterCrates) {
|
||||
function getFilterCrates() {
|
||||
const elem = document.getElementById("crate-search");
|
||||
|
||||
if (elem &&
|
||||
// @ts-expect-error
|
||||
elem.value !== "all crates" &&
|
||||
// @ts-expect-error
|
||||
window.searchIndex.has(elem.value)
|
||||
) {
|
||||
// @ts-expect-error
|
||||
if (elem && elem.value !== "all crates") {
|
||||
// @ts-expect-error
|
||||
return elem.value;
|
||||
}
|
||||
@@ -4777,8 +4607,7 @@ function getFilterCrates() {
|
||||
// @ts-expect-error
|
||||
function nextTab(direction) {
|
||||
const next = (searchState.currentTab + direction + 3) % searchState.focusedByTab.length;
|
||||
// @ts-expect-error
|
||||
searchState.focusedByTab[searchState.currentTab] = document.activeElement;
|
||||
window.searchState.focusedByTab[searchState.currentTab] = document.activeElement;
|
||||
printTab(next);
|
||||
focusSearchResult();
|
||||
}
|
||||
@@ -4790,133 +4619,182 @@ function focusSearchResult() {
|
||||
document.querySelectorAll(".search-results.active a").item(0) ||
|
||||
document.querySelectorAll("#search-tabs button").item(searchState.currentTab);
|
||||
searchState.focusedByTab[searchState.currentTab] = null;
|
||||
if (target) {
|
||||
// @ts-expect-error
|
||||
if (target && target instanceof HTMLElement) {
|
||||
target.focus();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Render a set of search results for a single tab.
|
||||
* @param {Array<?>} array - The search results for this tab
|
||||
* @param {rustdoc.ParsedQuery<rustdoc.QueryElement>} query
|
||||
* @param {AsyncGenerator<rustdoc.ResultObject>} results - The search results for this tab
|
||||
* @param {rustdoc.ParsedQuery<rustdoc.ParserQueryElement>} query
|
||||
* @param {boolean} display - True if this is the active tab
|
||||
* @param {function(number, HTMLElement): any} finishedCallback
|
||||
* @param {boolean} isTypeSearch
|
||||
* @returns {Promise<HTMLElement>}
|
||||
*/
|
||||
async function addTab(array, query, display) {
|
||||
async function addTab(results, query, display, finishedCallback, isTypeSearch) {
|
||||
const extraClass = display ? " active" : "";
|
||||
|
||||
const output = document.createElement(
|
||||
array.length === 0 && query.error === null ? "div" : "ul",
|
||||
);
|
||||
if (array.length > 0) {
|
||||
output.className = "search-results " + extraClass;
|
||||
/** @type {HTMLElement} */
|
||||
let output = document.createElement("ul");
|
||||
output.className = "search-results " + extraClass;
|
||||
|
||||
const lis = Promise.all(array.map(async item => {
|
||||
const name = item.is_alias ? item.original.name : item.name;
|
||||
const type = itemTypes[item.ty];
|
||||
const longType = longItemTypes[item.ty];
|
||||
const typeName = longType.length !== 0 ? `${longType}` : "?";
|
||||
let count = 0;
|
||||
|
||||
const link = document.createElement("a");
|
||||
link.className = "result-" + type;
|
||||
link.href = item.href;
|
||||
/** @type {Promise<string|null>[]} */
|
||||
const descList = [];
|
||||
|
||||
const resultName = document.createElement("span");
|
||||
resultName.className = "result-name";
|
||||
/** @param {rustdoc.ResultObject} obj */
|
||||
const addNextResultToOutput = async obj => {
|
||||
count += 1;
|
||||
|
||||
resultName.insertAdjacentHTML(
|
||||
"beforeend",
|
||||
`<span class="typename">${typeName}</span>`);
|
||||
link.appendChild(resultName);
|
||||
const name = obj.item.name;
|
||||
const type = itemTypes[obj.item.ty];
|
||||
const longType = longItemTypes[obj.item.ty];
|
||||
const typeName = longType.length !== 0 ? `${longType}` : "?";
|
||||
|
||||
let alias = " ";
|
||||
if (item.is_alias) {
|
||||
alias = ` <div class="alias">\
|
||||
<b>${item.name}</b><i class="grey"> - see </i>\
|
||||
const link = document.createElement("a");
|
||||
link.className = "result-" + type;
|
||||
link.href = obj.href;
|
||||
|
||||
const resultName = document.createElement("span");
|
||||
resultName.className = "result-name";
|
||||
|
||||
resultName.insertAdjacentHTML(
|
||||
"beforeend",
|
||||
`<span class="typename">${typeName}</span>`);
|
||||
link.appendChild(resultName);
|
||||
|
||||
let alias = " ";
|
||||
if (obj.alias !== undefined) {
|
||||
alias = ` <div class="alias">\
|
||||
<b>${obj.alias}</b><i class="grey"> - see </i>\
|
||||
</div>`;
|
||||
}
|
||||
resultName.insertAdjacentHTML(
|
||||
"beforeend",
|
||||
`<div class="path">${alias}\
|
||||
${item.displayPath}<span class="${type}">${name}</span>\
|
||||
}
|
||||
resultName.insertAdjacentHTML(
|
||||
"beforeend",
|
||||
`<div class="path">${alias}\
|
||||
${obj.displayPath}<span class="${type}">${name}</span>\
|
||||
</div>`);
|
||||
|
||||
const description = document.createElement("div");
|
||||
description.className = "desc";
|
||||
description.insertAdjacentHTML("beforeend", item.desc);
|
||||
if (item.displayTypeSignature) {
|
||||
const {type, mappedNames, whereClause} = await item.displayTypeSignature;
|
||||
const displayType = document.createElement("div");
|
||||
// @ts-expect-error
|
||||
type.forEach((value, index) => {
|
||||
if (index % 2 !== 0) {
|
||||
const highlight = document.createElement("strong");
|
||||
highlight.appendChild(document.createTextNode(value));
|
||||
displayType.appendChild(highlight);
|
||||
} else {
|
||||
displayType.appendChild(document.createTextNode(value));
|
||||
}
|
||||
});
|
||||
if (mappedNames.size > 0 || whereClause.size > 0) {
|
||||
let addWhereLineFn = () => {
|
||||
const line = document.createElement("div");
|
||||
line.className = "where";
|
||||
line.appendChild(document.createTextNode("where"));
|
||||
displayType.appendChild(line);
|
||||
addWhereLineFn = () => {};
|
||||
};
|
||||
for (const [qname, name] of mappedNames) {
|
||||
// don't care unless the generic name is different
|
||||
if (name === qname) {
|
||||
continue;
|
||||
}
|
||||
addWhereLineFn();
|
||||
const line = document.createElement("div");
|
||||
line.className = "where";
|
||||
line.appendChild(document.createTextNode(` ${qname} matches `));
|
||||
const lineStrong = document.createElement("strong");
|
||||
lineStrong.appendChild(document.createTextNode(name));
|
||||
line.appendChild(lineStrong);
|
||||
displayType.appendChild(line);
|
||||
}
|
||||
for (const [name, innerType] of whereClause) {
|
||||
// don't care unless there's at least one highlighted entry
|
||||
if (innerType.length <= 1) {
|
||||
continue;
|
||||
}
|
||||
addWhereLineFn();
|
||||
const line = document.createElement("div");
|
||||
line.className = "where";
|
||||
line.appendChild(document.createTextNode(` ${name}: `));
|
||||
// @ts-expect-error
|
||||
innerType.forEach((value, index) => {
|
||||
if (index % 2 !== 0) {
|
||||
const highlight = document.createElement("strong");
|
||||
highlight.appendChild(document.createTextNode(value));
|
||||
line.appendChild(highlight);
|
||||
} else {
|
||||
line.appendChild(document.createTextNode(value));
|
||||
}
|
||||
});
|
||||
displayType.appendChild(line);
|
||||
}
|
||||
}
|
||||
displayType.className = "type-signature";
|
||||
link.appendChild(displayType);
|
||||
}
|
||||
|
||||
link.appendChild(description);
|
||||
return link;
|
||||
}));
|
||||
lis.then(lis => {
|
||||
for (const li of lis) {
|
||||
output.appendChild(li);
|
||||
const description = document.createElement("div");
|
||||
description.className = "desc";
|
||||
obj.desc.then(desc => {
|
||||
if (desc !== null) {
|
||||
description.insertAdjacentHTML("beforeend", desc);
|
||||
}
|
||||
});
|
||||
} else if (query.error === null) {
|
||||
const dlroChannel = `https://doc.rust-lang.org/${getVar("channel")}`;
|
||||
descList.push(obj.desc);
|
||||
if (obj.displayTypeSignature) {
|
||||
const {type, mappedNames, whereClause} = await obj.displayTypeSignature;
|
||||
const displayType = document.createElement("div");
|
||||
type.forEach((value, index) => {
|
||||
if (index % 2 !== 0) {
|
||||
const highlight = document.createElement("strong");
|
||||
highlight.appendChild(document.createTextNode(value));
|
||||
displayType.appendChild(highlight);
|
||||
} else {
|
||||
displayType.appendChild(document.createTextNode(value));
|
||||
}
|
||||
});
|
||||
if (mappedNames.size > 0 || whereClause.size > 0) {
|
||||
let addWhereLineFn = () => {
|
||||
const line = document.createElement("div");
|
||||
line.className = "where";
|
||||
line.appendChild(document.createTextNode("where"));
|
||||
displayType.appendChild(line);
|
||||
addWhereLineFn = () => {};
|
||||
};
|
||||
for (const [qname, name] of mappedNames) {
|
||||
// don't care unless the generic name is different
|
||||
if (name === qname) {
|
||||
continue;
|
||||
}
|
||||
addWhereLineFn();
|
||||
const line = document.createElement("div");
|
||||
line.className = "where";
|
||||
line.appendChild(document.createTextNode(` ${qname} matches `));
|
||||
const lineStrong = document.createElement("strong");
|
||||
lineStrong.appendChild(document.createTextNode(name));
|
||||
line.appendChild(lineStrong);
|
||||
displayType.appendChild(line);
|
||||
}
|
||||
for (const [name, innerType] of whereClause) {
|
||||
// don't care unless there's at least one highlighted entry
|
||||
if (innerType.length <= 1) {
|
||||
continue;
|
||||
}
|
||||
addWhereLineFn();
|
||||
const line = document.createElement("div");
|
||||
line.className = "where";
|
||||
line.appendChild(document.createTextNode(` ${name}: `));
|
||||
innerType.forEach((value, index) => {
|
||||
if (index % 2 !== 0) {
|
||||
const highlight = document.createElement("strong");
|
||||
highlight.appendChild(document.createTextNode(value));
|
||||
line.appendChild(highlight);
|
||||
} else {
|
||||
line.appendChild(document.createTextNode(value));
|
||||
}
|
||||
});
|
||||
displayType.appendChild(line);
|
||||
}
|
||||
}
|
||||
displayType.className = "type-signature";
|
||||
link.appendChild(displayType);
|
||||
}
|
||||
|
||||
link.appendChild(description);
|
||||
output.appendChild(link);
|
||||
|
||||
results.next().then(async nextResult => {
|
||||
if (nextResult.value) {
|
||||
addNextResultToOutput(nextResult.value);
|
||||
} else {
|
||||
await Promise.all(descList);
|
||||
// need to make sure the element is shown before
|
||||
// running this callback
|
||||
yieldToBrowser().then(() => finishedCallback(count, output));
|
||||
}
|
||||
});
|
||||
};
|
||||
const firstResult = await results.next();
|
||||
let correctionOutput = "";
|
||||
if (query.correction !== null && isTypeSearch) {
|
||||
const orig = query.returned.length > 0
|
||||
? query.returned[0].name
|
||||
: query.elems[0].name;
|
||||
correctionOutput = "<h3 class=\"search-corrections\">" +
|
||||
`Type "${orig}" not found. ` +
|
||||
"Showing results for closest type name " +
|
||||
`"${query.correction}" instead.</h3>`;
|
||||
}
|
||||
if (query.proposeCorrectionFrom !== null && isTypeSearch) {
|
||||
const orig = query.proposeCorrectionFrom;
|
||||
const targ = query.proposeCorrectionTo;
|
||||
correctionOutput = "<h3 class=\"search-corrections\">" +
|
||||
`Type "${orig}" not found and used as generic parameter. ` +
|
||||
`Consider searching for "${targ}" instead.</h3>`;
|
||||
}
|
||||
if (firstResult.value) {
|
||||
if (correctionOutput !== "") {
|
||||
const h3 = document.createElement("h3");
|
||||
h3.innerHTML = correctionOutput;
|
||||
output.appendChild(h3);
|
||||
}
|
||||
await addNextResultToOutput(firstResult.value);
|
||||
} else {
|
||||
output = document.createElement("div");
|
||||
if (correctionOutput !== "") {
|
||||
const h3 = document.createElement("h3");
|
||||
h3.innerHTML = correctionOutput;
|
||||
output.appendChild(h3);
|
||||
}
|
||||
output.className = "search-failed" + extraClass;
|
||||
output.innerHTML = "No results :(<br/>" +
|
||||
const dlroChannel = `https://doc.rust-lang.org/${getVar("channel")}`;
|
||||
if (query.userQuery !== "") {
|
||||
output.innerHTML += "No results :(<br/>" +
|
||||
"Try on <a href=\"https://duckduckgo.com/?q=" +
|
||||
encodeURIComponent("rust " + query.userQuery) +
|
||||
"\">DuckDuckGo</a>?<br/><br/>" +
|
||||
@@ -4929,192 +4807,198 @@ ${item.displayPath}<span class="${type}">${name}</span>\
|
||||
"introductions to language features and the language itself.</li><li><a " +
|
||||
"href=\"https://docs.rs\">Docs.rs</a> for documentation of crates released on" +
|
||||
" <a href=\"https://crates.io/\">crates.io</a>.</li></ul>";
|
||||
}
|
||||
output.innerHTML += "Example searches:<ul>" +
|
||||
"<li><a href=\"" + getNakedUrl() + "?search=std::vec\">std::vec</a></li>" +
|
||||
"<li><a href=\"" + getNakedUrl() + "?search=u32+->+bool\">u32 -> bool</a></li>" +
|
||||
"<li><a href=\"" + getNakedUrl() + "?search=Option<T>,+(T+->+U)+->+Option<U>\">" +
|
||||
"Option<T>, (T -> U) -> Option<U></a></li>" +
|
||||
"</ul>";
|
||||
// need to make sure the element is shown before
|
||||
// running this callback
|
||||
yieldToBrowser().then(() => finishedCallback(0, output));
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
// @ts-expect-error
|
||||
function makeTabHeader(tabNb, text, nbElems) {
|
||||
// https://blog.horizon-eda.org/misc/2020/02/19/ui.html
|
||||
//
|
||||
// CSS runs with `font-variant-numeric: tabular-nums` to ensure all
|
||||
// digits are the same width. \u{2007} is a Unicode space character
|
||||
// that is defined to be the same width as a digit.
|
||||
const fmtNbElems =
|
||||
nbElems < 10 ? `\u{2007}(${nbElems})\u{2007}\u{2007}` :
|
||||
nbElems < 100 ? `\u{2007}(${nbElems})\u{2007}` : `\u{2007}(${nbElems})`;
|
||||
if (searchState.currentTab === tabNb) {
|
||||
return "<button class=\"selected\">" + text +
|
||||
"<span class=\"count\">" + fmtNbElems + "</span></button>";
|
||||
}
|
||||
return "<button>" + text + "<span class=\"count\">" + fmtNbElems + "</span></button>";
|
||||
/**
|
||||
* returns [tab, output]
|
||||
* @param {number} tabNb
|
||||
* @param {string} text
|
||||
* @param {AsyncGenerator<rustdoc.ResultObject>} results
|
||||
* @param {rustdoc.ParsedQuery<rustdoc.ParserQueryElement>} query
|
||||
* @param {boolean} isTypeSearch
|
||||
* @param {boolean} goToFirst
|
||||
* @returns {[HTMLElement, Promise<HTMLElement>]}
|
||||
*/
|
||||
function makeTab(tabNb, text, results, query, isTypeSearch, goToFirst) {
|
||||
const isCurrentTab = window.searchState.currentTab === tabNb;
|
||||
const tabButton = document.createElement("button");
|
||||
tabButton.appendChild(document.createTextNode(text));
|
||||
tabButton.className = isCurrentTab ? "selected" : "";
|
||||
const tabCount = document.createElement("span");
|
||||
tabCount.className = "count loading";
|
||||
tabCount.innerHTML = "\u{2007}(\u{2007})\u{2007}\u{2007}";
|
||||
tabButton.appendChild(tabCount);
|
||||
return [
|
||||
tabButton,
|
||||
addTab(results, query, isCurrentTab, (count, output) => {
|
||||
const search = window.searchState.outputElement();
|
||||
const error = query.error;
|
||||
if (count === 0 && error !== null && search) {
|
||||
error.forEach((value, index) => {
|
||||
value = value.split("<").join("<").split(">").join(">");
|
||||
if (index % 2 !== 0) {
|
||||
error[index] = `<code>${value.replaceAll(" ", " ")}</code>`;
|
||||
} else {
|
||||
error[index] = value;
|
||||
}
|
||||
});
|
||||
const errorReport = document.createElement("h3");
|
||||
errorReport.className = "error";
|
||||
errorReport.innerHTML = `Query parser error: "${error.join("")}".`;
|
||||
search.insertBefore(errorReport, search.firstElementChild);
|
||||
} else if (goToFirst ||
|
||||
(count === 1 && getSettingValue("go-to-only-result") === "true")
|
||||
) {
|
||||
// Needed to force re-execution of JS when coming back to a page. Let's take this
|
||||
// scenario as example:
|
||||
//
|
||||
// 1. You have the "Directly go to item in search if there is only one result"
|
||||
// option enabled.
|
||||
// 2. You make a search which results only one result, leading you automatically to
|
||||
// this result.
|
||||
// 3. You go back to previous page.
|
||||
//
|
||||
// Now, without the call below, the JS will not be re-executed and the previous
|
||||
// state will be used, starting search again since the search input is not empty,
|
||||
// leading you back to the previous page again.
|
||||
window.onunload = () => { };
|
||||
window.searchState.removeQueryParameters();
|
||||
const a = output.querySelector("a");
|
||||
if (a) {
|
||||
a.click();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// https://blog.horizon-eda.org/misc/2020/02/19/ui.html
|
||||
//
|
||||
// CSS runs with `font-variant-numeric: tabular-nums` to ensure all
|
||||
// digits are the same width. \u{2007} is a Unicode space character
|
||||
// that is defined to be the same width as a digit.
|
||||
const fmtNbElems =
|
||||
count < 10 ? `\u{2007}(${count})\u{2007}\u{2007}` :
|
||||
count < 100 ? `\u{2007}(${count})\u{2007}` : `\u{2007}(${count})`;
|
||||
tabCount.innerHTML = fmtNbElems;
|
||||
tabCount.className = "count";
|
||||
}, isTypeSearch),
|
||||
];
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DocSearch} docSearch
|
||||
* @param {rustdoc.ResultsTable} results
|
||||
* @param {boolean} go_to_first
|
||||
* @param {boolean} goToFirst
|
||||
* @param {string} filterCrates
|
||||
*/
|
||||
async function showResults(results, go_to_first, filterCrates) {
|
||||
const search = searchState.outputElement();
|
||||
if (go_to_first || (results.others.length === 1
|
||||
&& getSettingValue("go-to-only-result") === "true")
|
||||
) {
|
||||
// Needed to force re-execution of JS when coming back to a page. Let's take this
|
||||
// scenario as example:
|
||||
//
|
||||
// 1. You have the "Directly go to item in search if there is only one result" option
|
||||
// enabled.
|
||||
// 2. You make a search which results only one result, leading you automatically to
|
||||
// this result.
|
||||
// 3. You go back to previous page.
|
||||
//
|
||||
// Now, without the call below, the JS will not be re-executed and the previous state
|
||||
// will be used, starting search again since the search input is not empty, leading you
|
||||
// back to the previous page again.
|
||||
window.onunload = () => { };
|
||||
searchState.removeQueryParameters();
|
||||
const elem = document.createElement("a");
|
||||
elem.href = results.others[0].href;
|
||||
removeClass(elem, "active");
|
||||
// For firefox, we need the element to be in the DOM so it can be clicked.
|
||||
document.body.appendChild(elem);
|
||||
elem.click();
|
||||
async function showResults(docSearch, results, goToFirst, filterCrates) {
|
||||
const search = window.searchState.outputElement();
|
||||
|
||||
if (!search) {
|
||||
return;
|
||||
}
|
||||
if (results.query === undefined) {
|
||||
// @ts-expect-error
|
||||
results.query = DocSearch.parseQuery(searchState.input.value);
|
||||
}
|
||||
|
||||
currentResults = results.query.userQuery;
|
||||
|
||||
// Navigate to the relevant tab if the current tab is empty, like in case users search
|
||||
// for "-> String". If they had selected another tab previously, they have to click on
|
||||
// it again.
|
||||
let currentTab = searchState.currentTab;
|
||||
if ((currentTab === 0 && results.others.length === 0) ||
|
||||
(currentTab === 1 && results.in_args.length === 0) ||
|
||||
(currentTab === 2 && results.returned.length === 0)) {
|
||||
if (results.others.length !== 0) {
|
||||
currentTab = 0;
|
||||
} else if (results.in_args.length) {
|
||||
currentTab = 1;
|
||||
} else if (results.returned.length) {
|
||||
currentTab = 2;
|
||||
}
|
||||
}
|
||||
|
||||
let crates = "";
|
||||
if (rawSearchIndex.size > 1) {
|
||||
crates = "<div class=\"sub-heading\"> in <div id=\"crate-search-div\">" +
|
||||
const crateNames = await docSearch.getCrateNameList();
|
||||
if (crateNames.length > 1) {
|
||||
crates = " in <div id=\"crate-search-div\">" +
|
||||
"<select id=\"crate-search\"><option value=\"all crates\">all crates</option>";
|
||||
for (const c of rawSearchIndex.keys()) {
|
||||
const l = crateNames.length;
|
||||
for (let i = 0; i < l; i += 1) {
|
||||
const c = crateNames[i];
|
||||
crates += `<option value="${c}" ${c === filterCrates && "selected"}>${c}</option>`;
|
||||
}
|
||||
crates += "</select></div></div>";
|
||||
crates += "</select></div>";
|
||||
}
|
||||
nonnull(document.querySelector(".search-switcher")).innerHTML = `Search results${crates}`;
|
||||
|
||||
let output = `<div class="main-heading">\
|
||||
<h1 class="search-results-title">Results</h1>${crates}</div>`;
|
||||
/** @type {[HTMLElement, Promise<HTMLElement>][]} */
|
||||
const tabs = [];
|
||||
searchState.currentTab = 0;
|
||||
if (results.query.error !== null) {
|
||||
const error = results.query.error;
|
||||
// @ts-expect-error
|
||||
error.forEach((value, index) => {
|
||||
value = value.split("<").join("<").split(">").join(">");
|
||||
if (index % 2 !== 0) {
|
||||
error[index] = `<code>${value.replaceAll(" ", " ")}</code>`;
|
||||
} else {
|
||||
error[index] = value;
|
||||
}
|
||||
});
|
||||
output += `<h3 class="error">Query parser error: "${error.join("")}".</h3>`;
|
||||
output += "<div id=\"search-tabs\">" +
|
||||
makeTabHeader(0, "In Names", results.others.length) +
|
||||
"</div>";
|
||||
currentTab = 0;
|
||||
} else if (results.query.foundElems <= 1 && results.query.returned.length === 0) {
|
||||
output += "<div id=\"search-tabs\">" +
|
||||
makeTabHeader(0, "In Names", results.others.length) +
|
||||
makeTabHeader(1, "In Parameters", results.in_args.length) +
|
||||
makeTabHeader(2, "In Return Types", results.returned.length) +
|
||||
"</div>";
|
||||
tabs.push(makeTab(0, "In Names", results.others, results.query, false, goToFirst));
|
||||
} else if (
|
||||
results.query.foundElems <= 1 &&
|
||||
results.query.returned.length === 0 &&
|
||||
!results.query.hasReturnArrow
|
||||
) {
|
||||
tabs.push(makeTab(0, "In Names", results.others, results.query, false, goToFirst));
|
||||
tabs.push(makeTab(1, "In Parameters", results.in_args, results.query, true, false));
|
||||
tabs.push(makeTab(2, "In Return Types", results.returned, results.query, true, false));
|
||||
} else {
|
||||
const signatureTabTitle =
|
||||
results.query.elems.length === 0 ? "In Function Return Types" :
|
||||
results.query.returned.length === 0 ? "In Function Parameters" :
|
||||
"In Function Signatures";
|
||||
output += "<div id=\"search-tabs\">" +
|
||||
makeTabHeader(0, signatureTabTitle, results.others.length) +
|
||||
"</div>";
|
||||
currentTab = 0;
|
||||
tabs.push(makeTab(0, signatureTabTitle, results.others, results.query, true, goToFirst));
|
||||
}
|
||||
|
||||
if (results.query.correction !== null) {
|
||||
const orig = results.query.returned.length > 0
|
||||
? results.query.returned[0].name
|
||||
: results.query.elems[0].name;
|
||||
output += "<h3 class=\"search-corrections\">" +
|
||||
`Type "${orig}" not found. ` +
|
||||
"Showing results for closest type name " +
|
||||
`"${results.query.correction}" instead.</h3>`;
|
||||
}
|
||||
if (results.query.proposeCorrectionFrom !== null) {
|
||||
const orig = results.query.proposeCorrectionFrom;
|
||||
const targ = results.query.proposeCorrectionTo;
|
||||
output += "<h3 class=\"search-corrections\">" +
|
||||
`Type "${orig}" not found and used as generic parameter. ` +
|
||||
`Consider searching for "${targ}" instead.</h3>`;
|
||||
}
|
||||
|
||||
const [ret_others, ret_in_args, ret_returned] = await Promise.all([
|
||||
addTab(results.others, results.query, currentTab === 0),
|
||||
addTab(results.in_args, results.query, currentTab === 1),
|
||||
addTab(results.returned, results.query, currentTab === 2),
|
||||
]);
|
||||
const tabsElem = document.createElement("div");
|
||||
tabsElem.id = "search-tabs";
|
||||
|
||||
const resultsElem = document.createElement("div");
|
||||
resultsElem.id = "results";
|
||||
resultsElem.appendChild(ret_others);
|
||||
resultsElem.appendChild(ret_in_args);
|
||||
resultsElem.appendChild(ret_returned);
|
||||
|
||||
// @ts-expect-error
|
||||
search.innerHTML = output;
|
||||
if (searchState.rustdocToolbar) {
|
||||
// @ts-expect-error
|
||||
search.querySelector(".main-heading").appendChild(searchState.rustdocToolbar);
|
||||
search.innerHTML = "";
|
||||
for (const [tab, output] of tabs) {
|
||||
tabsElem.appendChild(tab);
|
||||
const placeholder = document.createElement("div");
|
||||
output.then(output => {
|
||||
if (placeholder.parentElement) {
|
||||
placeholder.parentElement.replaceChild(output, placeholder);
|
||||
}
|
||||
});
|
||||
resultsElem.appendChild(placeholder);
|
||||
}
|
||||
|
||||
if (window.searchState.rustdocToolbar) {
|
||||
nonnull(
|
||||
nonnull(window.searchState.containerElement())
|
||||
.querySelector(".main-heading"),
|
||||
).appendChild(window.searchState.rustdocToolbar);
|
||||
}
|
||||
const crateSearch = document.getElementById("crate-search");
|
||||
if (crateSearch) {
|
||||
crateSearch.addEventListener("input", updateCrate);
|
||||
}
|
||||
// @ts-expect-error
|
||||
search.appendChild(tabsElem);
|
||||
search.appendChild(resultsElem);
|
||||
// Reset focused elements.
|
||||
searchState.showResults(search);
|
||||
// @ts-expect-error
|
||||
const elems = document.getElementById("search-tabs").childNodes;
|
||||
// @ts-expect-error
|
||||
searchState.focusedByTab = [];
|
||||
window.searchState.showResults();
|
||||
window.searchState.focusedByTab = [null, null, null];
|
||||
let i = 0;
|
||||
for (const elem of elems) {
|
||||
for (const elem of tabsElem.childNodes) {
|
||||
const j = i;
|
||||
// @ts-expect-error
|
||||
elem.onclick = () => printTab(j);
|
||||
searchState.focusedByTab.push(null);
|
||||
window.searchState.focusedByTab[i] = null;
|
||||
i += 1;
|
||||
}
|
||||
printTab(currentTab);
|
||||
printTab(0);
|
||||
}
|
||||
|
||||
// @ts-expect-error
|
||||
function updateSearchHistory(url) {
|
||||
const btn = document.querySelector("#search-button a");
|
||||
if (btn instanceof HTMLAnchorElement) {
|
||||
btn.href = url;
|
||||
}
|
||||
if (!browserSupportsHistoryApi()) {
|
||||
return;
|
||||
}
|
||||
const params = searchState.getQueryStringParams();
|
||||
if (!history.state && !params.search) {
|
||||
if (!history.state && params.search === undefined) {
|
||||
history.pushState(null, "", url);
|
||||
} else {
|
||||
history.replaceState(null, "", url);
|
||||
@@ -5127,8 +5011,8 @@ function updateSearchHistory(url) {
|
||||
* @param {boolean} [forced]
|
||||
*/
|
||||
async function search(forced) {
|
||||
// @ts-expect-error
|
||||
const query = DocSearch.parseQuery(searchState.input.value.trim());
|
||||
const query = DocSearch.parseQuery(nonnull(window.searchState.inputElement()).value.trim());
|
||||
|
||||
let filterCrates = getFilterCrates();
|
||||
|
||||
// @ts-expect-error
|
||||
@@ -5138,6 +5022,7 @@ async function search(forced) {
|
||||
}
|
||||
return;
|
||||
}
|
||||
currentResults = query.userQuery;
|
||||
|
||||
searchState.setLoadingSearch();
|
||||
|
||||
@@ -5149,6 +5034,12 @@ async function search(forced) {
|
||||
filterCrates = params["filter-crate"];
|
||||
}
|
||||
|
||||
if (filterCrates !== null &&
|
||||
(await docSearch.getCrateNameList()).indexOf(filterCrates) === -1
|
||||
) {
|
||||
filterCrates = null;
|
||||
}
|
||||
|
||||
// Update document title to maintain a meaningful browser history
|
||||
searchState.title = "\"" + query.userQuery + "\" Search - Rust";
|
||||
|
||||
@@ -5157,6 +5048,7 @@ async function search(forced) {
|
||||
updateSearchHistory(buildUrl(query.userQuery, filterCrates));
|
||||
|
||||
await showResults(
|
||||
docSearch,
|
||||
// @ts-expect-error
|
||||
await docSearch.execQuery(query, filterCrates, window.currentCrate),
|
||||
params.go_to_first,
|
||||
@@ -5176,16 +5068,14 @@ function onSearchSubmit(e) {
|
||||
}
|
||||
|
||||
function putBackSearch() {
|
||||
const search_input = searchState.input;
|
||||
if (!searchState.input) {
|
||||
const search_input = window.searchState.inputElement();
|
||||
if (!search_input) {
|
||||
return;
|
||||
}
|
||||
// @ts-expect-error
|
||||
if (search_input.value !== "" && !searchState.isDisplayed()) {
|
||||
searchState.showResults();
|
||||
if (browserSupportsHistoryApi()) {
|
||||
history.replaceState(null, "",
|
||||
// @ts-expect-error
|
||||
buildUrl(search_input.value, getFilterCrates()));
|
||||
}
|
||||
document.title = searchState.title;
|
||||
@@ -5199,30 +5089,21 @@ function registerSearchEvents() {
|
||||
// but only if the input bar is empty. This avoid the obnoxious issue
|
||||
// where you start trying to do a search, and the index loads, and
|
||||
// suddenly your search is gone!
|
||||
// @ts-expect-error
|
||||
if (searchState.input.value === "") {
|
||||
// @ts-expect-error
|
||||
searchState.input.value = params.search || "";
|
||||
const inputElement = nonnull(window.searchState.inputElement());
|
||||
if (inputElement.value === "") {
|
||||
inputElement.value = params.search || "";
|
||||
}
|
||||
|
||||
const searchAfter500ms = () => {
|
||||
searchState.clearInputTimeout();
|
||||
// @ts-expect-error
|
||||
if (searchState.input.value.length === 0) {
|
||||
searchState.hideResults();
|
||||
} else {
|
||||
// @ts-ignore
|
||||
searchState.timeout = setTimeout(search, 500);
|
||||
}
|
||||
window.searchState.timeout = setTimeout(search, 500);
|
||||
};
|
||||
// @ts-expect-error
|
||||
searchState.input.onkeyup = searchAfter500ms;
|
||||
// @ts-expect-error
|
||||
searchState.input.oninput = searchAfter500ms;
|
||||
// @ts-expect-error
|
||||
document.getElementsByClassName("search-form")[0].onsubmit = onSearchSubmit;
|
||||
// @ts-expect-error
|
||||
searchState.input.onchange = e => {
|
||||
inputElement.onkeyup = searchAfter500ms;
|
||||
inputElement.oninput = searchAfter500ms;
|
||||
if (inputElement.form) {
|
||||
inputElement.form.onsubmit = onSearchSubmit;
|
||||
}
|
||||
inputElement.onchange = e => {
|
||||
if (e.target !== document.activeElement) {
|
||||
// To prevent doing anything when it's from a blur event.
|
||||
return;
|
||||
@@ -5234,11 +5115,13 @@ function registerSearchEvents() {
|
||||
// change, though.
|
||||
setTimeout(search, 0);
|
||||
};
|
||||
// @ts-expect-error
|
||||
searchState.input.onpaste = searchState.input.onchange;
|
||||
inputElement.onpaste = inputElement.onchange;
|
||||
|
||||
// @ts-expect-error
|
||||
searchState.outputElement().addEventListener("keydown", e => {
|
||||
if (!(e instanceof KeyboardEvent)) {
|
||||
return;
|
||||
}
|
||||
// We only handle unmodified keystrokes here. We don't want to interfere with,
|
||||
// for instance, alt-left and alt-right for history navigation.
|
||||
if (e.altKey || e.ctrlKey || e.shiftKey || e.metaKey) {
|
||||
@@ -5278,88 +5161,23 @@ function registerSearchEvents() {
|
||||
}
|
||||
});
|
||||
|
||||
// @ts-expect-error
|
||||
searchState.input.addEventListener("keydown", e => {
|
||||
inputElement.addEventListener("keydown", e => {
|
||||
if (e.which === 40) { // down
|
||||
focusSearchResult();
|
||||
e.preventDefault();
|
||||
}
|
||||
});
|
||||
|
||||
// @ts-expect-error
|
||||
searchState.input.addEventListener("focus", () => {
|
||||
inputElement.addEventListener("focus", () => {
|
||||
putBackSearch();
|
||||
});
|
||||
|
||||
// @ts-expect-error
|
||||
searchState.input.addEventListener("blur", () => {
|
||||
if (window.searchState.input) {
|
||||
window.searchState.input.placeholder = window.searchState.origPlaceholder;
|
||||
}
|
||||
});
|
||||
|
||||
// Push and pop states are used to add search results to the browser
|
||||
// history.
|
||||
if (browserSupportsHistoryApi()) {
|
||||
// Store the previous <title> so we can revert back to it later.
|
||||
const previousTitle = document.title;
|
||||
|
||||
window.addEventListener("popstate", e => {
|
||||
const params = searchState.getQueryStringParams();
|
||||
// Revert to the previous title manually since the History
|
||||
// API ignores the title parameter.
|
||||
document.title = previousTitle;
|
||||
// When browsing forward to search results the previous
|
||||
// search will be repeated, so the currentResults are
|
||||
// cleared to ensure the search is successful.
|
||||
currentResults = null;
|
||||
// Synchronize search bar with query string state and
|
||||
// perform the search. This will empty the bar if there's
|
||||
// nothing there, which lets you really go back to a
|
||||
// previous state with nothing in the bar.
|
||||
if (params.search && params.search.length > 0) {
|
||||
// @ts-expect-error
|
||||
searchState.input.value = params.search;
|
||||
// Some browsers fire "onpopstate" for every page load
|
||||
// (Chrome), while others fire the event only when actually
|
||||
// popping a state (Firefox), which is why search() is
|
||||
// called both here and at the end of the startSearch()
|
||||
// function.
|
||||
e.preventDefault();
|
||||
search();
|
||||
} else {
|
||||
// @ts-expect-error
|
||||
searchState.input.value = "";
|
||||
// When browsing back from search results the main page
|
||||
// visibility must be reset.
|
||||
searchState.hideResults();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// This is required in firefox to avoid this problem: Navigating to a search result
|
||||
// with the keyboard, hitting enter, and then hitting back would take you back to
|
||||
// the doc page, rather than the search that should overlay it.
|
||||
// This was an interaction between the back-forward cache and our handlers
|
||||
// that try to sync state between the URL and the search input. To work around it,
|
||||
// do a small amount of re-init on page show.
|
||||
window.onpageshow = () => {
|
||||
const qSearch = searchState.getQueryStringParams().search;
|
||||
// @ts-expect-error
|
||||
if (searchState.input.value === "" && qSearch) {
|
||||
// @ts-expect-error
|
||||
searchState.input.value = qSearch;
|
||||
}
|
||||
search();
|
||||
};
|
||||
}
|
||||
|
||||
// @ts-expect-error
|
||||
function updateCrate(ev) {
|
||||
if (ev.target.value === "all crates") {
|
||||
// If we don't remove it from the URL, it'll be picked up again by the search.
|
||||
// @ts-expect-error
|
||||
const query = searchState.input.value.trim();
|
||||
const query = nonnull(window.searchState.inputElement()).value.trim();
|
||||
updateSearchHistory(buildUrl(query, null));
|
||||
}
|
||||
// In case you "cut" the entry from the search input, then change the crate filter
|
||||
@@ -5369,522 +5187,91 @@ function updateCrate(ev) {
|
||||
search(true);
|
||||
}
|
||||
|
||||
// Parts of this code are based on Lucene, which is licensed under the
|
||||
// Apache/2.0 license.
|
||||
// More information found here:
|
||||
// https://fossies.org/linux/lucene/lucene/core/src/java/org/apache/lucene/util/automaton/
|
||||
// LevenshteinAutomata.java
|
||||
class ParametricDescription {
|
||||
// @ts-expect-error
|
||||
constructor(w, n, minErrors) {
|
||||
this.w = w;
|
||||
this.n = n;
|
||||
this.minErrors = minErrors;
|
||||
}
|
||||
// @ts-expect-error
|
||||
isAccept(absState) {
|
||||
const state = Math.floor(absState / (this.w + 1));
|
||||
const offset = absState % (this.w + 1);
|
||||
return this.w - offset + this.minErrors[state] <= this.n;
|
||||
}
|
||||
// @ts-expect-error
|
||||
getPosition(absState) {
|
||||
return absState % (this.w + 1);
|
||||
}
|
||||
// @ts-expect-error
|
||||
getVector(name, charCode, pos, end) {
|
||||
let vector = 0;
|
||||
for (let i = pos; i < end; i += 1) {
|
||||
vector = vector << 1;
|
||||
if (name.charCodeAt(i) === charCode) {
|
||||
vector |= 1;
|
||||
}
|
||||
}
|
||||
return vector;
|
||||
}
|
||||
// @ts-expect-error
|
||||
unpack(data, index, bitsPerValue) {
|
||||
const bitLoc = (bitsPerValue * index);
|
||||
const dataLoc = bitLoc >> 5;
|
||||
const bitStart = bitLoc & 31;
|
||||
if (bitStart + bitsPerValue <= 32) {
|
||||
// not split
|
||||
return ((data[dataLoc] >> bitStart) & this.MASKS[bitsPerValue - 1]);
|
||||
} else {
|
||||
// split
|
||||
const part = 32 - bitStart;
|
||||
return ~~(((data[dataLoc] >> bitStart) & this.MASKS[part - 1]) +
|
||||
((data[1 + dataLoc] & this.MASKS[bitsPerValue - part - 1]) << part));
|
||||
}
|
||||
// eslint-disable-next-line max-len
|
||||
// polyfill https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array/fromBase64
|
||||
/**
|
||||
* @type {function(string): Uint8Array} base64
|
||||
*/
|
||||
//@ts-expect-error
|
||||
const makeUint8ArrayFromBase64 = Uint8Array.fromBase64 ? Uint8Array.fromBase64 : (string => {
|
||||
const bytes_as_string = atob(string);
|
||||
const l = bytes_as_string.length;
|
||||
const bytes = new Uint8Array(l);
|
||||
for (let i = 0; i < l; ++i) {
|
||||
bytes[i] = bytes_as_string.charCodeAt(i);
|
||||
}
|
||||
return bytes;
|
||||
});
|
||||
|
||||
|
||||
if (ROOT_PATH === null) {
|
||||
return;
|
||||
}
|
||||
ParametricDescription.prototype.MASKS = new Int32Array([
|
||||
0x1, 0x3, 0x7, 0xF,
|
||||
0x1F, 0x3F, 0x7F, 0xFF,
|
||||
0x1FF, 0x3F, 0x7FF, 0xFFF,
|
||||
0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF,
|
||||
0x1FFFF, 0x3FFFF, 0x7FFFF, 0xFFFFF,
|
||||
0x1FFFFF, 0x3FFFFF, 0x7FFFFF, 0xFFFFFF,
|
||||
0x1FFFFFF, 0x3FFFFFF, 0x7FFFFFF, 0xFFFFFFF,
|
||||
0x1FFFFFFF, 0x3FFFFFFF, 0x7FFFFFFF, 0xFFFFFFFF,
|
||||
]);
|
||||
|
||||
// The following code was generated with the moman/finenight pkg
|
||||
// This package is available under the MIT License, see NOTICE.txt
|
||||
// for more details.
|
||||
// This class is auto-generated, Please do not modify it directly.
|
||||
// You should modify the https://gitlab.com/notriddle/createAutomata.py instead.
|
||||
// The following code was generated with the moman/finenight pkg
|
||||
// This package is available under the MIT License, see NOTICE.txt
|
||||
// for more details.
|
||||
// This class is auto-generated, Please do not modify it directly.
|
||||
// You should modify https://gitlab.com/notriddle/moman-rustdoc instead.
|
||||
|
||||
class Lev2TParametricDescription extends ParametricDescription {
|
||||
/**
|
||||
* @param {number} absState
|
||||
* @param {number} position
|
||||
* @param {number} vector
|
||||
* @returns {number}
|
||||
*/
|
||||
transition(absState, position, vector) {
|
||||
let state = Math.floor(absState / (this.w + 1));
|
||||
let offset = absState % (this.w + 1);
|
||||
|
||||
if (position === this.w) {
|
||||
if (state < 3) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 3) + state;
|
||||
offset += this.unpack(this.offsetIncrs0, loc, 1);
|
||||
state = this.unpack(this.toStates0, loc, 2) - 1;
|
||||
}
|
||||
} else if (position === this.w - 1) {
|
||||
if (state < 5) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 5) + state;
|
||||
offset += this.unpack(this.offsetIncrs1, loc, 1);
|
||||
state = this.unpack(this.toStates1, loc, 3) - 1;
|
||||
}
|
||||
} else if (position === this.w - 2) {
|
||||
if (state < 13) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 13) + state;
|
||||
offset += this.unpack(this.offsetIncrs2, loc, 2);
|
||||
state = this.unpack(this.toStates2, loc, 4) - 1;
|
||||
}
|
||||
} else if (position === this.w - 3) {
|
||||
if (state < 28) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 28) + state;
|
||||
offset += this.unpack(this.offsetIncrs3, loc, 2);
|
||||
state = this.unpack(this.toStates3, loc, 5) - 1;
|
||||
}
|
||||
} else if (position === this.w - 4) {
|
||||
if (state < 45) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 45) + state;
|
||||
offset += this.unpack(this.offsetIncrs4, loc, 3);
|
||||
state = this.unpack(this.toStates4, loc, 6) - 1;
|
||||
}
|
||||
} else {
|
||||
if (state < 45) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 45) + state;
|
||||
offset += this.unpack(this.offsetIncrs5, loc, 3);
|
||||
state = this.unpack(this.toStates5, loc, 6) - 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (state === -1) {
|
||||
// null state
|
||||
return -1;
|
||||
} else {
|
||||
// translate back to abs
|
||||
return Math.imul(state, this.w + 1) + offset;
|
||||
}
|
||||
}
|
||||
|
||||
// state map
|
||||
// 0 -> [(0, 0)]
|
||||
// 1 -> [(0, 1)]
|
||||
// 2 -> [(0, 2)]
|
||||
// 3 -> [(0, 1), (1, 1)]
|
||||
// 4 -> [(0, 2), (1, 2)]
|
||||
// 5 -> [(0, 1), (1, 1), (2, 1)]
|
||||
// 6 -> [(0, 2), (1, 2), (2, 2)]
|
||||
// 7 -> [(0, 1), (2, 1)]
|
||||
// 8 -> [(0, 1), (2, 2)]
|
||||
// 9 -> [(0, 2), (2, 1)]
|
||||
// 10 -> [(0, 2), (2, 2)]
|
||||
// 11 -> [t(0, 1), (0, 1), (1, 1), (2, 1)]
|
||||
// 12 -> [t(0, 2), (0, 2), (1, 2), (2, 2)]
|
||||
// 13 -> [(0, 2), (1, 2), (2, 2), (3, 2)]
|
||||
// 14 -> [(0, 1), (1, 1), (3, 2)]
|
||||
// 15 -> [(0, 1), (2, 2), (3, 2)]
|
||||
// 16 -> [(0, 1), (3, 2)]
|
||||
// 17 -> [(0, 1), t(1, 2), (2, 2), (3, 2)]
|
||||
// 18 -> [(0, 2), (1, 2), (3, 1)]
|
||||
// 19 -> [(0, 2), (1, 2), (3, 2)]
|
||||
// 20 -> [(0, 2), (1, 2), t(1, 2), (2, 2), (3, 2)]
|
||||
// 21 -> [(0, 2), (2, 1), (3, 1)]
|
||||
// 22 -> [(0, 2), (2, 2), (3, 2)]
|
||||
// 23 -> [(0, 2), (3, 1)]
|
||||
// 24 -> [(0, 2), (3, 2)]
|
||||
// 25 -> [(0, 2), t(1, 2), (1, 2), (2, 2), (3, 2)]
|
||||
// 26 -> [t(0, 2), (0, 2), (1, 2), (2, 2), (3, 2)]
|
||||
// 27 -> [t(0, 2), (0, 2), (1, 2), (3, 1)]
|
||||
// 28 -> [(0, 2), (1, 2), (2, 2), (3, 2), (4, 2)]
|
||||
// 29 -> [(0, 2), (1, 2), (2, 2), (4, 2)]
|
||||
// 30 -> [(0, 2), (1, 2), (2, 2), t(2, 2), (3, 2), (4, 2)]
|
||||
// 31 -> [(0, 2), (1, 2), (3, 2), (4, 2)]
|
||||
// 32 -> [(0, 2), (1, 2), (4, 2)]
|
||||
// 33 -> [(0, 2), (1, 2), t(1, 2), (2, 2), (3, 2), (4, 2)]
|
||||
// 34 -> [(0, 2), (1, 2), t(2, 2), (2, 2), (3, 2), (4, 2)]
|
||||
// 35 -> [(0, 2), (2, 1), (4, 2)]
|
||||
// 36 -> [(0, 2), (2, 2), (3, 2), (4, 2)]
|
||||
// 37 -> [(0, 2), (2, 2), (4, 2)]
|
||||
// 38 -> [(0, 2), (3, 2), (4, 2)]
|
||||
// 39 -> [(0, 2), (4, 2)]
|
||||
// 40 -> [(0, 2), t(1, 2), (1, 2), (2, 2), (3, 2), (4, 2)]
|
||||
// 41 -> [(0, 2), t(2, 2), (2, 2), (3, 2), (4, 2)]
|
||||
// 42 -> [t(0, 2), (0, 2), (1, 2), (2, 2), (3, 2), (4, 2)]
|
||||
// 43 -> [t(0, 2), (0, 2), (1, 2), (2, 2), (4, 2)]
|
||||
// 44 -> [t(0, 2), (0, 2), (1, 2), (2, 2), t(2, 2), (3, 2), (4, 2)]
|
||||
|
||||
|
||||
/** @param {number} w - length of word being checked */
|
||||
constructor(w) {
|
||||
super(w, 2, new Int32Array([
|
||||
0,1,2,0,1,-1,0,-1,0,-1,0,-1,0,-1,-1,-1,-1,-1,-2,-1,-1,-2,-1,-2,
|
||||
-1,-1,-1,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,
|
||||
]));
|
||||
const database = await Stringdex.loadDatabase(hooks);
|
||||
if (typeof window !== "undefined") {
|
||||
docSearch = new DocSearch(ROOT_PATH, database);
|
||||
await docSearch.buildIndex();
|
||||
onEachLazy(document.querySelectorAll(
|
||||
".search-form.loading",
|
||||
), form => {
|
||||
removeClass(form, "loading");
|
||||
});
|
||||
registerSearchEvents();
|
||||
// If there's a search term in the URL, execute the search now.
|
||||
if (window.searchState.getQueryStringParams().search !== undefined) {
|
||||
search();
|
||||
}
|
||||
} else if (typeof exports !== "undefined") {
|
||||
docSearch = new DocSearch(ROOT_PATH, database);
|
||||
await docSearch.buildIndex();
|
||||
return { docSearch, DocSearch };
|
||||
}
|
||||
|
||||
Lev2TParametricDescription.prototype.toStates0 = /*2 bits per value */ new Int32Array([
|
||||
0xe,
|
||||
]);
|
||||
Lev2TParametricDescription.prototype.offsetIncrs0 = /*1 bits per value */ new Int32Array([
|
||||
0x0,
|
||||
]);
|
||||
|
||||
Lev2TParametricDescription.prototype.toStates1 = /*3 bits per value */ new Int32Array([
|
||||
0x1a688a2c,
|
||||
]);
|
||||
Lev2TParametricDescription.prototype.offsetIncrs1 = /*1 bits per value */ new Int32Array([
|
||||
0x3e0,
|
||||
]);
|
||||
|
||||
Lev2TParametricDescription.prototype.toStates2 = /*4 bits per value */ new Int32Array([
|
||||
0x70707054,0xdc07035,0x3dd3a3a,0x2323213a,
|
||||
0x15435223,0x22545432,0x5435,
|
||||
]);
|
||||
Lev2TParametricDescription.prototype.offsetIncrs2 = /*2 bits per value */ new Int32Array([
|
||||
0x80000,0x55582088,0x55555555,0x55,
|
||||
]);
|
||||
|
||||
Lev2TParametricDescription.prototype.toStates3 = /*5 bits per value */ new Int32Array([
|
||||
0x1c0380a4,0x700a570,0xca529c0,0x180a00,
|
||||
0xa80af180,0xc5498e60,0x5a546398,0x8c4300e8,
|
||||
0xac18c601,0xd8d43501,0x863500ad,0x51976d6a,
|
||||
0x8ca0180a,0xc3501ac2,0xb0c5be16,0x76dda8a5,
|
||||
0x18c4519,0xc41294a,0xe248d231,0x1086520c,
|
||||
0xce31ac42,0x13946358,0x2d0348c4,0x6732d494,
|
||||
0x1ad224a5,0xd635ad4b,0x520c4139,0xce24948,
|
||||
0x22110a52,0x58ce729d,0xc41394e3,0x941cc520,
|
||||
0x90e732d4,0x4729d224,0x39ce35ad,
|
||||
]);
|
||||
Lev2TParametricDescription.prototype.offsetIncrs3 = /*2 bits per value */ new Int32Array([
|
||||
0x80000,0xc0c830,0x300f3c30,0x2200fcff,
|
||||
0xcaa00a08,0x3c2200a8,0xa8fea00a,0x55555555,
|
||||
0x55555555,0x55555555,0x55555555,0x55555555,
|
||||
0x55555555,0x55555555,
|
||||
]);
|
||||
|
||||
Lev2TParametricDescription.prototype.toStates4 = /*6 bits per value */ new Int32Array([
|
||||
0x801c0144,0x1453803,0x14700038,0xc0005145,
|
||||
0x1401,0x14,0x140000,0x0,
|
||||
0x510000,0x6301f007,0x301f00d1,0xa186178,
|
||||
0xc20ca0c3,0xc20c30,0xc30030c,0xc00c00cd,
|
||||
0xf0c00c30,0x4c054014,0xc30944c3,0x55150c34,
|
||||
0x8300550,0x430c0143,0x50c31,0xc30850c,
|
||||
0xc3143000,0x50053c50,0x5130d301,0x850d30c2,
|
||||
0x30a08608,0xc214414,0x43142145,0x21450031,
|
||||
0x1400c314,0x4c143145,0x32832803,0x28014d6c,
|
||||
0xcd34a0c3,0x1c50c76,0x1c314014,0x430c30c3,
|
||||
0x1431,0xc300500,0xca00d303,0xd36d0e40,
|
||||
0x90b0e400,0xcb2abb2c,0x70c20ca1,0x2c32ca2c,
|
||||
0xcd2c70cb,0x31c00c00,0x34c2c32c,0x5583280,
|
||||
0x558309b7,0x6cd6ca14,0x430850c7,0x51c51401,
|
||||
0x1430c714,0xc3087,0x71451450,0xca00d30,
|
||||
0xc26dc156,0xb9071560,0x1cb2abb2,0xc70c2144,
|
||||
0xb1c51ca1,0x1421c70c,0xc51c00c3,0x30811c51,
|
||||
0x24324308,0xc51031c2,0x70820820,0x5c33830d,
|
||||
0xc33850c3,0x30c30c30,0xc30c31c,0x451450c3,
|
||||
0x20c20c20,0xda0920d,0x5145914f,0x36596114,
|
||||
0x51965865,0xd9643653,0x365a6590,0x51964364,
|
||||
0x43081505,0x920b2032,0x2c718b28,0xd7242249,
|
||||
0x35cb28b0,0x2cb3872c,0x972c30d7,0xb0c32cb2,
|
||||
0x4e1c75c,0xc80c90c2,0x62ca2482,0x4504171c,
|
||||
0xd65d9610,0x33976585,0xd95cb5d,0x4b5ca5d7,
|
||||
0x73975c36,0x10308138,0xc2245105,0x41451031,
|
||||
0x14e24208,0xc35c3387,0x51453851,0x1c51c514,
|
||||
0xc70c30c3,0x20451450,0x14f1440c,0x4f0da092,
|
||||
0x4513d41,0x6533944d,0x1350e658,0xe1545055,
|
||||
0x64365a50,0x5519383,0x51030815,0x28920718,
|
||||
0x441c718b,0x714e2422,0x1c35cb28,0x4e1c7387,
|
||||
0xb28e1c51,0x5c70c32c,0xc204e1c7,0x81c61440,
|
||||
0x1c62ca24,0xd04503ce,0x85d63944,0x39338e65,
|
||||
0x8e154387,0x364b5ca3,0x38739738,
|
||||
]);
|
||||
Lev2TParametricDescription.prototype.offsetIncrs4 = /*3 bits per value */ new Int32Array([
|
||||
0x10000000,0xc00000,0x60061,0x400,
|
||||
0x0,0x80010008,0x249248a4,0x8229048,
|
||||
0x2092,0x6c3603,0xb61b6c30,0x6db6036d,
|
||||
0xdb6c0,0x361b0180,0x91b72000,0xdb11b71b,
|
||||
0x6db6236,0x1008200,0x12480012,0x24924906,
|
||||
0x48200049,0x80410002,0x24000900,0x4924a489,
|
||||
0x10822492,0x20800125,0x48360,0x9241b692,
|
||||
0x6da4924,0x40009268,0x241b010,0x291b4900,
|
||||
0x6d249249,0x49493423,0x92492492,0x24924924,
|
||||
0x49249249,0x92492492,0x24924924,0x49249249,
|
||||
0x92492492,0x24924924,0x49249249,0x92492492,
|
||||
0x24924924,0x49249249,0x92492492,0x24924924,
|
||||
0x49249249,0x92492492,0x24924924,0x49249249,
|
||||
0x92492492,0x24924924,0x49249249,0x92492492,
|
||||
0x24924924,0x49249249,0x92492492,0x24924924,
|
||||
0x49249249,0x92492492,0x24924924,0x49249249,
|
||||
0x92492492,0x24924924,0x49249249,0x2492,
|
||||
]);
|
||||
|
||||
Lev2TParametricDescription.prototype.toStates5 = /*6 bits per value */ new Int32Array([
|
||||
0x801c0144,0x1453803,0x14700038,0xc0005145,
|
||||
0x1401,0x14,0x140000,0x0,
|
||||
0x510000,0x4e00e007,0xe0051,0x3451451c,
|
||||
0xd015000,0x30cd0000,0xc30c30c,0xc30c30d4,
|
||||
0x40c30c30,0x7c01c014,0xc03458c0,0x185e0c07,
|
||||
0x2830c286,0x830c3083,0xc30030,0x33430c,
|
||||
0x30c3003,0x70051030,0x16301f00,0x8301f00d,
|
||||
0x30a18617,0xc20ca0c,0x431420c3,0xb1450c51,
|
||||
0x14314315,0x4f143145,0x34c05401,0x4c30944c,
|
||||
0x55150c3,0x30830055,0x1430c014,0xc00050c3,
|
||||
0xc30850,0xc314300,0x150053c5,0x25130d30,
|
||||
0x5430d30c,0xc0354154,0x300d0c90,0x1cb2cd0c,
|
||||
0xc91cb0c3,0x72c30cb2,0x14f1cb2c,0xc34c0540,
|
||||
0x34c30944,0x82182214,0x851050c2,0x50851430,
|
||||
0x1400c50c,0x30c5085,0x50c51450,0x150053c,
|
||||
0xc25130d3,0x8850d30,0x1430a086,0x450c2144,
|
||||
0x51cb1c21,0x1c91c70c,0xc71c314b,0x34c1cb1,
|
||||
0x6c328328,0xc328014d,0x76cd34a0,0x1401c50c,
|
||||
0xc31c3140,0x31430c30,0x14,0x30c3005,
|
||||
0xa0ca00d3,0x535b0c,0x4d2830ca,0x514369b3,
|
||||
0xc500d01,0x5965965a,0x30d46546,0x6435030c,
|
||||
0x8034c659,0xdb439032,0x2c390034,0xcaaecb24,
|
||||
0x30832872,0xcb28b1c,0x4b1c32cb,0x70030033,
|
||||
0x30b0cb0c,0xe40ca00d,0x400d36d0,0xb2c90b0e,
|
||||
0xca1cb2ab,0xa2c70c20,0x6575d95c,0x4315b5ce,
|
||||
0x95c53831,0x28034c5d,0x9b705583,0xa1455830,
|
||||
0xc76cd6c,0x40143085,0x71451c51,0x871430c,
|
||||
0x450000c3,0xd3071451,0x1560ca00,0x560c26dc,
|
||||
0xb35b2851,0xc914369,0x1a14500d,0x46593945,
|
||||
0xcb2c939,0x94507503,0x328034c3,0x9b70558,
|
||||
0xe41c5583,0x72caaeca,0x1c308510,0xc7147287,
|
||||
0x50871c32,0x1470030c,0xd307147,0xc1560ca0,
|
||||
0x1560c26d,0xabb2b907,0x21441cb2,0x38a1c70c,
|
||||
0x8e657394,0x314b1c93,0x39438738,0x43083081,
|
||||
0x31c22432,0x820c510,0x830d7082,0x50c35c33,
|
||||
0xc30c338,0xc31c30c3,0x50c30c30,0xc204514,
|
||||
0x890c90c2,0x31440c70,0xa8208208,0xea0df0c3,
|
||||
0x8a231430,0xa28a28a2,0x28a28a1e,0x1861868a,
|
||||
0x48308308,0xc3682483,0x14516453,0x4d965845,
|
||||
0xd4659619,0x36590d94,0xd969964,0x546590d9,
|
||||
0x20c20541,0x920d20c,0x5914f0da,0x96114514,
|
||||
0x65865365,0xe89d3519,0x99e7a279,0x9e89e89e,
|
||||
0x81821827,0xb2032430,0x18b28920,0x422492c7,
|
||||
0xb28b0d72,0x3872c35c,0xc30d72cb,0x32cb2972,
|
||||
0x1c75cb0c,0xc90c204e,0xa2482c80,0x24b1c62c,
|
||||
0xc3a89089,0xb0ea2e42,0x9669a31c,0xa4966a28,
|
||||
0x59a8a269,0x8175e7a,0xb203243,0x718b2892,
|
||||
0x4114105c,0x17597658,0x74ce5d96,0x5c36572d,
|
||||
0xd92d7297,0xe1ce5d70,0xc90c204,0xca2482c8,
|
||||
0x4171c62,0x5d961045,0x976585d6,0x79669533,
|
||||
0x964965a2,0x659689e6,0x308175e7,0x24510510,
|
||||
0x451031c2,0xe2420841,0x5c338714,0x453851c3,
|
||||
0x51c51451,0xc30c31c,0x451450c7,0x41440c20,
|
||||
0xc708914,0x82105144,0xf1c58c90,0x1470ea0d,
|
||||
0x61861863,0x8a1e85e8,0x8687a8a2,0x3081861,
|
||||
0x24853c51,0x5053c368,0x1341144f,0x96194ce5,
|
||||
0x1544d439,0x94385514,0xe0d90d96,0x5415464,
|
||||
0x4f1440c2,0xf0da0921,0x4513d414,0x533944d0,
|
||||
0x350e6586,0x86082181,0xe89e981d,0x18277689,
|
||||
0x10308182,0x89207185,0x41c718b2,0x14e24224,
|
||||
0xc35cb287,0xe1c73871,0x28e1c514,0xc70c32cb,
|
||||
0x204e1c75,0x1c61440c,0xc62ca248,0x90891071,
|
||||
0x2e41c58c,0xa31c70ea,0xe86175e7,0xa269a475,
|
||||
0x5e7a57a8,0x51030817,0x28920718,0xf38718b,
|
||||
0xe5134114,0x39961758,0xe1ce4ce,0x728e3855,
|
||||
0x5ce0d92d,0xc204e1ce,0x81c61440,0x1c62ca24,
|
||||
0xd04503ce,0x85d63944,0x75338e65,0x5d86075e,
|
||||
0x89e69647,0x75e76576,
|
||||
]);
|
||||
Lev2TParametricDescription.prototype.offsetIncrs5 = /*3 bits per value */ new Int32Array([
|
||||
0x10000000,0xc00000,0x60061,0x400,
|
||||
0x0,0x60000008,0x6b003080,0xdb6ab6db,
|
||||
0x2db6,0x800400,0x49245240,0x11482412,
|
||||
0x104904,0x40020000,0x92292000,0xa4b25924,
|
||||
0x9649658,0xd80c000,0xdb0c001b,0x80db6d86,
|
||||
0x6db01b6d,0xc0600003,0x86000d86,0x6db6c36d,
|
||||
0xddadb6ed,0x300001b6,0x6c360,0xe37236e4,
|
||||
0x46db6236,0xdb6c,0x361b018,0xb91b7200,
|
||||
0x6dbb1b71,0x6db763,0x20100820,0x61248001,
|
||||
0x92492490,0x24820004,0x8041000,0x92400090,
|
||||
0x24924830,0x555b6a49,0x2080012,0x20004804,
|
||||
0x49252449,0x84112492,0x4000928,0x240201,
|
||||
0x92922490,0x58924924,0x49456,0x120d8082,
|
||||
0x6da4800,0x69249249,0x249a01b,0x6c04100,
|
||||
0x6d240009,0x92492483,0x24d5adb4,0x60208001,
|
||||
0x92000483,0x24925236,0x6846da49,0x10400092,
|
||||
0x241b0,0x49291b49,0x636d2492,0x92494935,
|
||||
0x24924924,0x49249249,0x92492492,0x24924924,
|
||||
0x49249249,0x92492492,0x24924924,0x49249249,
|
||||
0x92492492,0x24924924,0x49249249,0x92492492,
|
||||
0x24924924,0x49249249,0x92492492,0x24924924,
|
||||
0x49249249,0x92492492,0x24924924,0x49249249,
|
||||
0x92492492,0x24924924,0x49249249,0x92492492,
|
||||
0x24924924,0x49249249,0x92492492,0x24924924,
|
||||
0x49249249,0x92492492,0x24924924,0x49249249,
|
||||
0x92492492,0x24924924,0x49249249,0x92492492,
|
||||
0x24924924,0x49249249,0x92492492,0x24924924,
|
||||
0x49249249,0x92492492,0x24924924,0x49249249,
|
||||
0x92492492,0x24924924,0x49249249,0x92492492,
|
||||
0x24924924,0x49249249,0x92492492,0x24924924,
|
||||
0x49249249,0x92492492,0x24924924,0x49249249,
|
||||
0x92492492,0x24924924,0x49249249,0x92492492,
|
||||
0x24924924,0x49249249,0x92492492,0x24924924,
|
||||
0x49249249,0x92492492,0x24924924,
|
||||
]);
|
||||
|
||||
class Lev1TParametricDescription extends ParametricDescription {
|
||||
/**
|
||||
* @param {number} absState
|
||||
* @param {number} position
|
||||
* @param {number} vector
|
||||
* @returns {number}
|
||||
*/
|
||||
transition(absState, position, vector) {
|
||||
let state = Math.floor(absState / (this.w + 1));
|
||||
let offset = absState % (this.w + 1);
|
||||
|
||||
if (position === this.w) {
|
||||
if (state < 2) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 2) + state;
|
||||
offset += this.unpack(this.offsetIncrs0, loc, 1);
|
||||
state = this.unpack(this.toStates0, loc, 2) - 1;
|
||||
}
|
||||
} else if (position === this.w - 1) {
|
||||
if (state < 3) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 3) + state;
|
||||
offset += this.unpack(this.offsetIncrs1, loc, 1);
|
||||
state = this.unpack(this.toStates1, loc, 2) - 1;
|
||||
}
|
||||
} else if (position === this.w - 2) {
|
||||
if (state < 6) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 6) + state;
|
||||
offset += this.unpack(this.offsetIncrs2, loc, 2);
|
||||
state = this.unpack(this.toStates2, loc, 3) - 1;
|
||||
}
|
||||
} else {
|
||||
if (state < 6) { // eslint-disable-line no-lonely-if
|
||||
const loc = Math.imul(vector, 6) + state;
|
||||
offset += this.unpack(this.offsetIncrs3, loc, 2);
|
||||
state = this.unpack(this.toStates3, loc, 3) - 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (state === -1) {
|
||||
// null state
|
||||
return -1;
|
||||
} else {
|
||||
// translate back to abs
|
||||
return Math.imul(state, this.w + 1) + offset;
|
||||
}
|
||||
}
|
||||
|
||||
// state map
|
||||
// 0 -> [(0, 0)]
|
||||
// 1 -> [(0, 1)]
|
||||
// 2 -> [(0, 1), (1, 1)]
|
||||
// 3 -> [(0, 1), (1, 1), (2, 1)]
|
||||
// 4 -> [(0, 1), (2, 1)]
|
||||
// 5 -> [t(0, 1), (0, 1), (1, 1), (2, 1)]
|
||||
|
||||
|
||||
/** @param {number} w - length of word being checked */
|
||||
constructor(w) {
|
||||
super(w, 1, new Int32Array([0,1,0,-1,-1,-1]));
|
||||
}
|
||||
}
|
||||
|
||||
Lev1TParametricDescription.prototype.toStates0 = /*2 bits per value */ new Int32Array([
|
||||
0x2,
|
||||
]);
|
||||
Lev1TParametricDescription.prototype.offsetIncrs0 = /*1 bits per value */ new Int32Array([
|
||||
0x0,
|
||||
]);
|
||||
|
||||
Lev1TParametricDescription.prototype.toStates1 = /*2 bits per value */ new Int32Array([
|
||||
0xa43,
|
||||
]);
|
||||
Lev1TParametricDescription.prototype.offsetIncrs1 = /*1 bits per value */ new Int32Array([
|
||||
0x38,
|
||||
]);
|
||||
|
||||
Lev1TParametricDescription.prototype.toStates2 = /*3 bits per value */ new Int32Array([
|
||||
0x12180003,0xb45a4914,0x69,
|
||||
]);
|
||||
Lev1TParametricDescription.prototype.offsetIncrs2 = /*2 bits per value */ new Int32Array([
|
||||
0x558a0000,0x5555,
|
||||
]);
|
||||
|
||||
Lev1TParametricDescription.prototype.toStates3 = /*3 bits per value */ new Int32Array([
|
||||
0x900c0003,0xa1904864,0x45a49169,0x5a6d196a,
|
||||
0x9634,
|
||||
]);
|
||||
Lev1TParametricDescription.prototype.offsetIncrs3 = /*2 bits per value */ new Int32Array([
|
||||
0xa0fc0000,0x5555ba08,0x55555555,
|
||||
]);
|
||||
|
||||
// ====================
|
||||
// WARNING: Nothing should be added below this comment: we need the `initSearch` function to
|
||||
// be called ONLY when the whole file has been parsed and loaded.
|
||||
|
||||
// @ts-expect-error
|
||||
function initSearch(searchIndex) {
|
||||
rawSearchIndex = searchIndex;
|
||||
if (typeof window !== "undefined") {
|
||||
// @ts-expect-error
|
||||
docSearch = new DocSearch(rawSearchIndex, ROOT_PATH, searchState);
|
||||
registerSearchEvents();
|
||||
// If there's a search term in the URL, execute the search now.
|
||||
if (window.searchState.getQueryStringParams().search) {
|
||||
search();
|
||||
}
|
||||
} else if (typeof exports !== "undefined") {
|
||||
// @ts-expect-error
|
||||
docSearch = new DocSearch(rawSearchIndex, ROOT_PATH, searchState);
|
||||
exports.docSearch = docSearch;
|
||||
exports.parseQuery = DocSearch.parseQuery;
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof exports !== "undefined") {
|
||||
exports.initSearch = initSearch;
|
||||
}
|
||||
};
|
||||
|
||||
if (typeof window !== "undefined") {
|
||||
// @ts-expect-error
|
||||
window.initSearch = initSearch;
|
||||
// @ts-expect-error
|
||||
if (window.searchIndex !== undefined) {
|
||||
// @ts-expect-error
|
||||
initSearch(window.searchIndex);
|
||||
}
|
||||
} else {
|
||||
// Running in Node, not a browser. Run initSearch just to produce the
|
||||
// exports.
|
||||
initSearch(new Map());
|
||||
const ROOT_PATH = window.rootPath;
|
||||
/** @type {stringdex.Callbacks|null} */
|
||||
let databaseCallbacks = null;
|
||||
initSearch(window.Stringdex, window.RoaringBitmap, {
|
||||
loadRoot: callbacks => {
|
||||
for (const key in callbacks) {
|
||||
if (Object.hasOwn(callbacks, key)) {
|
||||
// @ts-ignore
|
||||
window[key] = callbacks[key];
|
||||
}
|
||||
}
|
||||
databaseCallbacks = callbacks;
|
||||
// search.index/root is loaded by main.js, so
|
||||
// this script doesn't need to launch it, but
|
||||
// must pick it up
|
||||
// @ts-ignore
|
||||
if (window.searchIndex) {
|
||||
// @ts-ignore
|
||||
window.rr_(window.searchIndex);
|
||||
}
|
||||
},
|
||||
loadTreeByHash: hashHex => {
|
||||
const script = document.createElement("script");
|
||||
script.src = `${ROOT_PATH}/search.index/${hashHex}.js`;
|
||||
script.onerror = e => {
|
||||
if (databaseCallbacks) {
|
||||
databaseCallbacks.err_rn_(hashHex, e);
|
||||
}
|
||||
};
|
||||
document.documentElement.appendChild(script);
|
||||
},
|
||||
loadDataByNameAndHash: (name, hashHex) => {
|
||||
const script = document.createElement("script");
|
||||
script.src = `${ROOT_PATH}/search.index/${name}/${hashHex}.js`;
|
||||
script.onerror = e => {
|
||||
if (databaseCallbacks) {
|
||||
databaseCallbacks.err_rd_(hashHex, e);
|
||||
}
|
||||
};
|
||||
document.documentElement.appendChild(script);
|
||||
},
|
||||
});
|
||||
} else if (typeof exports !== "undefined") {
|
||||
// eslint-disable-next-line no-undef
|
||||
exports.initSearch = initSearch;
|
||||
}
|
||||
|
||||
@@ -1,25 +1,13 @@
|
||||
// Local js definitions:
|
||||
/* global getSettingValue, updateLocalStorage, updateTheme */
|
||||
/* global addClass, removeClass, onEach, onEachLazy */
|
||||
/* global MAIN_ID, getVar, getSettingsButton, getHelpButton, nonnull */
|
||||
/* global MAIN_ID, getVar, nonnull */
|
||||
|
||||
"use strict";
|
||||
|
||||
(function() {
|
||||
const isSettingsPage = window.location.pathname.endsWith("/settings.html");
|
||||
|
||||
/**
|
||||
* @param {Element} elem
|
||||
* @param {EventTarget|null} target
|
||||
*/
|
||||
function elemContainsTarget(elem, target) {
|
||||
if (target instanceof Node) {
|
||||
return elem.contains(target);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @overload {"theme"|"preferred-dark-theme"|"preferred-light-theme"}
|
||||
* @param {string} settingName
|
||||
@@ -305,10 +293,12 @@
|
||||
}
|
||||
} else {
|
||||
el.setAttribute("tabindex", "-1");
|
||||
const settingsBtn = getSettingsButton();
|
||||
if (settingsBtn !== null) {
|
||||
settingsBtn.appendChild(el);
|
||||
}
|
||||
onEachLazy(document.querySelectorAll(".settings-menu"), menu => {
|
||||
if (menu.offsetWidth !== 0) {
|
||||
menu.appendChild(el);
|
||||
return true;
|
||||
}
|
||||
});
|
||||
}
|
||||
return el;
|
||||
}
|
||||
@@ -317,6 +307,15 @@
|
||||
|
||||
function displaySettings() {
|
||||
settingsMenu.style.display = "";
|
||||
onEachLazy(document.querySelectorAll(".settings-menu"), menu => {
|
||||
if (menu.offsetWidth !== 0) {
|
||||
if (!menu.contains(settingsMenu) && settingsMenu.parentElement) {
|
||||
settingsMenu.parentElement.removeChild(settingsMenu);
|
||||
menu.appendChild(settingsMenu);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
});
|
||||
onEachLazy(settingsMenu.querySelectorAll("input[type='checkbox']"), el => {
|
||||
const val = getSettingValue(el.id);
|
||||
const checked = val === "true";
|
||||
@@ -330,40 +329,37 @@
|
||||
* @param {FocusEvent} event
|
||||
*/
|
||||
function settingsBlurHandler(event) {
|
||||
const helpBtn = getHelpButton();
|
||||
const settingsBtn = getSettingsButton();
|
||||
const helpUnfocused = helpBtn === null ||
|
||||
(!helpBtn.contains(document.activeElement) &&
|
||||
!elemContainsTarget(helpBtn, event.relatedTarget));
|
||||
const settingsUnfocused = settingsBtn === null ||
|
||||
(!settingsBtn.contains(document.activeElement) &&
|
||||
!elemContainsTarget(settingsBtn, event.relatedTarget));
|
||||
if (helpUnfocused && settingsUnfocused) {
|
||||
const isInPopover = onEachLazy(
|
||||
document.querySelectorAll(".settings-menu, .help-menu"),
|
||||
menu => {
|
||||
return menu.contains(document.activeElement) || menu.contains(event.relatedTarget);
|
||||
},
|
||||
);
|
||||
if (!isInPopover) {
|
||||
window.hidePopoverMenus();
|
||||
}
|
||||
}
|
||||
|
||||
if (!isSettingsPage) {
|
||||
// We replace the existing "onclick" callback.
|
||||
// These elements must exist, as (outside of the settings page)
|
||||
// `settings.js` is only loaded after the settings button is clicked.
|
||||
const settingsButton = nonnull(getSettingsButton());
|
||||
const settingsMenu = nonnull(document.getElementById("settings"));
|
||||
settingsButton.onclick = event => {
|
||||
if (elemContainsTarget(settingsMenu, event.target)) {
|
||||
return;
|
||||
}
|
||||
event.preventDefault();
|
||||
const shouldDisplaySettings = settingsMenu.style.display === "none";
|
||||
onEachLazy(document.querySelectorAll(".settings-menu"), settingsButton => {
|
||||
/** @param {MouseEvent} event */
|
||||
settingsButton.querySelector("a").onclick = event => {
|
||||
if (!(event.target instanceof Element) || settingsMenu.contains(event.target)) {
|
||||
return;
|
||||
}
|
||||
event.preventDefault();
|
||||
const shouldDisplaySettings = settingsMenu.style.display === "none";
|
||||
|
||||
window.hideAllModals(false);
|
||||
if (shouldDisplaySettings) {
|
||||
displaySettings();
|
||||
}
|
||||
};
|
||||
settingsButton.onblur = settingsBlurHandler;
|
||||
// the settings button should always have a link in it
|
||||
nonnull(settingsButton.querySelector("a")).onblur = settingsBlurHandler;
|
||||
window.hideAllModals(false);
|
||||
if (shouldDisplaySettings) {
|
||||
displaySettings();
|
||||
}
|
||||
};
|
||||
settingsButton.onblur = settingsBlurHandler;
|
||||
settingsButton.querySelector("a").onblur = settingsBlurHandler;
|
||||
});
|
||||
onEachLazy(settingsMenu.querySelectorAll("input"), el => {
|
||||
el.onblur = settingsBlurHandler;
|
||||
});
|
||||
@@ -377,6 +373,8 @@
|
||||
if (!isSettingsPage) {
|
||||
displaySettings();
|
||||
}
|
||||
removeClass(getSettingsButton(), "rotate");
|
||||
onEachLazy(document.querySelectorAll(".settings-menu"), settingsButton => {
|
||||
removeClass(settingsButton, "rotate");
|
||||
});
|
||||
}, 0);
|
||||
})();
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
|
||||
/**
|
||||
* @import * as rustdoc from "./rustdoc.d.ts";
|
||||
* @import * as stringdex from "./stringdex.d.ts";
|
||||
*/
|
||||
|
||||
const builtinThemes = ["light", "dark", "ayu"];
|
||||
@@ -172,7 +173,7 @@ function updateLocalStorage(name, value) {
|
||||
} else {
|
||||
window.localStorage.setItem("rustdoc-" + name, value);
|
||||
}
|
||||
} catch (e) {
|
||||
} catch {
|
||||
// localStorage is not accessible, do nothing
|
||||
}
|
||||
}
|
||||
@@ -189,7 +190,7 @@ function updateLocalStorage(name, value) {
|
||||
function getCurrentValue(name) {
|
||||
try {
|
||||
return window.localStorage.getItem("rustdoc-" + name);
|
||||
} catch (e) {
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -375,32 +376,6 @@ window.addEventListener("pageshow", ev => {
|
||||
// That's also why this is in storage.js and not main.js.
|
||||
//
|
||||
// [parser]: https://html.spec.whatwg.org/multipage/parsing.html
|
||||
class RustdocSearchElement extends HTMLElement {
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
connectedCallback() {
|
||||
const rootPath = getVar("root-path");
|
||||
const currentCrate = getVar("current-crate");
|
||||
this.innerHTML = `<nav class="sub">
|
||||
<form class="search-form">
|
||||
<span></span> <!-- This empty span is a hacky fix for Safari - See #93184 -->
|
||||
<div id="sidebar-button" tabindex="-1">
|
||||
<a href="${rootPath}${currentCrate}/all.html" title="show sidebar"></a>
|
||||
</div>
|
||||
<input
|
||||
class="search-input"
|
||||
name="search"
|
||||
aria-label="Run search in the documentation"
|
||||
autocomplete="off"
|
||||
spellcheck="false"
|
||||
placeholder="Type ‘S’ or ‘/’ to search, ‘?’ for more options…"
|
||||
type="search">
|
||||
</form>
|
||||
</nav>`;
|
||||
}
|
||||
}
|
||||
window.customElements.define("rustdoc-search", RustdocSearchElement);
|
||||
class RustdocToolbarElement extends HTMLElement {
|
||||
constructor() {
|
||||
super();
|
||||
@@ -411,11 +386,15 @@ class RustdocToolbarElement extends HTMLElement {
|
||||
return;
|
||||
}
|
||||
const rootPath = getVar("root-path");
|
||||
const currentUrl = window.location.href.split("?")[0].split("#")[0];
|
||||
this.innerHTML = `
|
||||
<div id="settings-menu" tabindex="-1">
|
||||
<div id="search-button" tabindex="-1">
|
||||
<a href="${currentUrl}?search="><span class="label">Search</span></a>
|
||||
</div>
|
||||
<div class="settings-menu" tabindex="-1">
|
||||
<a href="${rootPath}settings.html"><span class="label">Settings</span></a>
|
||||
</div>
|
||||
<div id="help-button" tabindex="-1">
|
||||
<div class="help-menu" tabindex="-1">
|
||||
<a href="${rootPath}help.html"><span class="label">Help</span></a>
|
||||
</div>
|
||||
<button id="toggle-all-docs"
|
||||
@@ -424,3 +403,31 @@ class="label">Summary</span></button>`;
|
||||
}
|
||||
}
|
||||
window.customElements.define("rustdoc-toolbar", RustdocToolbarElement);
|
||||
class RustdocTopBarElement extends HTMLElement {
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
connectedCallback() {
|
||||
const rootPath = getVar("root-path");
|
||||
const tmplt = document.createElement("template");
|
||||
tmplt.innerHTML = `
|
||||
<slot name="sidebar-menu-toggle"></slot>
|
||||
<slot></slot>
|
||||
<slot name="settings-menu"></slot>
|
||||
<slot name="help-menu"></slot>
|
||||
`;
|
||||
const shadow = this.attachShadow({ mode: "open" });
|
||||
shadow.appendChild(tmplt.content.cloneNode(true));
|
||||
this.innerHTML += `
|
||||
<button class="sidebar-menu-toggle" slot="sidebar-menu-toggle" title="show sidebar">
|
||||
</button>
|
||||
<div class="settings-menu" slot="settings-menu" tabindex="-1">
|
||||
<a href="${rootPath}settings.html"><span class="label">Settings</span></a>
|
||||
</div>
|
||||
<div class="help-menu" slot="help-menu" tabindex="-1">
|
||||
<a href="${rootPath}help.html"><span class="label">Help</span></a>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
}
|
||||
window.customElements.define("rustdoc-topbar", RustdocTopBarElement);
|
||||
|
||||
+165
@@ -0,0 +1,165 @@
|
||||
export = stringdex;
|
||||
|
||||
declare namespace stringdex {
|
||||
/**
|
||||
* The client interface to Stringdex.
|
||||
*/
|
||||
interface Database {
|
||||
getIndex(colname: string): SearchTree|undefined;
|
||||
getData(colname: string): DataColumn|undefined;
|
||||
}
|
||||
/**
|
||||
* A search index file.
|
||||
*/
|
||||
interface SearchTree {
|
||||
trie(): Trie;
|
||||
search(name: Uint8Array|string): Promise<Trie?>;
|
||||
searchLev(name: Uint8Array|string): AsyncGenerator<Trie>;
|
||||
}
|
||||
/**
|
||||
* A compressed node in the search tree.
|
||||
*
|
||||
* This object logically addresses two interleaved trees:
|
||||
* a "prefix tree", and a "suffix tree". If you ask for
|
||||
* generic matches, you get both, but if you ask for one
|
||||
* that excludes suffix-only entries, you'll get prefixes
|
||||
* alone.
|
||||
*/
|
||||
interface Trie {
|
||||
matches(): RoaringBitmap;
|
||||
substringMatches(): AsyncGenerator<RoaringBitmap>;
|
||||
prefixMatches(): AsyncGenerator<RoaringBitmap>;
|
||||
keys(): Uint8Array;
|
||||
keysExcludeSuffixOnly(): Uint8Array;
|
||||
children(): [number, Promise<Trie>][];
|
||||
childrenExcludeSuffixOnly(): [number, Promise<Trie>][];
|
||||
child(id: number): Promise<Trie>?;
|
||||
}
|
||||
/**
|
||||
* A column of row data, addressed by row id.
|
||||
*/
|
||||
interface DataColumn {
|
||||
isEmpty(id: number): boolean;
|
||||
at(id: number): Promise<Uint8Array|undefined>;
|
||||
length: number,
|
||||
}
|
||||
/**
|
||||
* Callbacks for a host application and VFS backend.
|
||||
*
|
||||
* These functions are called with mostly-raw data,
|
||||
* except the JSONP wrapper is removed. For example,
|
||||
* a file with the contents `rr_('{"A":"B"}')` should,
|
||||
* after being pulled in, result in the `rr_` callback
|
||||
* being invoked.
|
||||
*
|
||||
* The success callbacks don't need to supply the name of
|
||||
* the file that succeeded, but, if you want useful error
|
||||
* reporting, you'll need to remember which files are
|
||||
* in flight and report the filename as the first parameter.
|
||||
*/
|
||||
interface Callbacks {
|
||||
/**
|
||||
* Load the root of the search database
|
||||
* @param {string} dataString
|
||||
*/
|
||||
rr_: function(string);
|
||||
err_rr_: function(any);
|
||||
/**
|
||||
* Load a nodefile in the search tree.
|
||||
* A node file may contain multiple nodes;
|
||||
* each node has five fields, separated by newlines.
|
||||
* @param {string} inputBase64
|
||||
*/
|
||||
rn_: function(string);
|
||||
err_rn_: function(string, any);
|
||||
/**
|
||||
* Load a database column partition from a string
|
||||
* @param {string} dataString
|
||||
*/
|
||||
rd_: function(string);
|
||||
err_rd_: function(string, any);
|
||||
/**
|
||||
* Load a database column partition from base64
|
||||
* @param {string} dataString
|
||||
*/
|
||||
rb_: function(string);
|
||||
err_rb_: function(string, any);
|
||||
};
|
||||
/**
|
||||
* Hooks that a VFS layer must provide for stringdex to load data.
|
||||
*
|
||||
* When the root is loaded, the Callbacks object is provided. These
|
||||
* functions should result in callback functions being called with
|
||||
* the contents of the file, or in error callbacks being invoked with
|
||||
* the failed-to-load filename.
|
||||
*/
|
||||
interface Hooks {
|
||||
/**
|
||||
* The first function invoked as part of loading a search database.
|
||||
* This function must, eventually, invoke `rr_` with the string
|
||||
* representation of the root file (the function call wrapper,
|
||||
* `rr_('` and `')`, must be removed).
|
||||
*
|
||||
* The supplied callbacks object is used to feed search data back
|
||||
* to the search engine core. You have to store it, so that
|
||||
* loadTreeByHash and loadDataByNameAndHash can use it.
|
||||
*
|
||||
* If this fails, either throw an exception, or call `err_rr_`
|
||||
* with the error object.
|
||||
*/
|
||||
loadRoot: function(Callbacks);
|
||||
/**
|
||||
* Load a subtree file from the search index.
|
||||
*
|
||||
* If this function succeeds, call `rn_` on the callbacks
|
||||
* object. If it fails, call `err_rn_(hashHex, error)`.
|
||||
*
|
||||
* @param {string} hashHex
|
||||
*/
|
||||
loadTreeByHash: function(string);
|
||||
/**
|
||||
* Load a column partition from the search database.
|
||||
*
|
||||
* If this function succeeds, call `rd_` or `rb_` on the callbacks
|
||||
* object. If it fails, call `err_rd_(hashHex, error)` or `err_rb_(hashHex, error)`.
|
||||
* To determine which one, the wrapping function call in the js file
|
||||
* specifies it.
|
||||
*
|
||||
* @param {string} columnName
|
||||
* @param {string} hashHex
|
||||
*/
|
||||
loadDataByNameAndHash: function(string, string);
|
||||
};
|
||||
class RoaringBitmap {
|
||||
constructor(array: Uint8Array|null, start?: number);
|
||||
static makeSingleton(number: number);
|
||||
static everything(): RoaringBitmap;
|
||||
static empty(): RoaringBitmap;
|
||||
isEmpty(): boolean;
|
||||
union(that: RoaringBitmap): RoaringBitmap;
|
||||
intersection(that: RoaringBitmap): RoaringBitmap;
|
||||
contains(number: number): boolean;
|
||||
entries(): Generator<number>;
|
||||
first(): number|null;
|
||||
consumed_len_bytes: number;
|
||||
};
|
||||
|
||||
type Stringdex = {
|
||||
/**
|
||||
* Initialize Stringdex with VFS hooks.
|
||||
* Returns a database that you can use.
|
||||
*/
|
||||
loadDatabase: function(Hooks): Promise<Database>,
|
||||
};
|
||||
|
||||
const Stringdex: Stringdex;
|
||||
const RoaringBitmap: Class<stringdex.RoaringBitmap>;
|
||||
}
|
||||
|
||||
declare global {
|
||||
interface Window {
|
||||
Stringdex: stringdex.Stringdex;
|
||||
RoaringBitmap: Class<stringdex.RoaringBitmap>;
|
||||
StringdexOnload: Array<function(stringdex.Stringdex): any>?;
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,3217 @@
|
||||
/**
|
||||
* @import * as stringdex from "./stringdex.d.ts"
|
||||
*/
|
||||
|
||||
const EMPTY_UINT8 = new Uint8Array();
|
||||
|
||||
/**
|
||||
* @property {Uint8Array} keysAndCardinalities
|
||||
* @property {Uint8Array[]} containers
|
||||
*/
|
||||
class RoaringBitmap {
|
||||
/**
|
||||
* @param {Uint8Array|null} u8array
|
||||
* @param {number} [startingOffset]
|
||||
*/
|
||||
constructor(u8array, startingOffset) {
|
||||
const start = startingOffset ? startingOffset : 0;
|
||||
let i = start;
|
||||
/** @type {Uint8Array} */
|
||||
this.keysAndCardinalities = EMPTY_UINT8;
|
||||
/** @type {(RoaringBitmapArray|RoaringBitmapBits|RoaringBitmapRun)[]} */
|
||||
this.containers = [];
|
||||
/** @type {number} */
|
||||
this.consumed_len_bytes = 0;
|
||||
if (u8array === null || u8array.length === i || u8array[i] === 0) {
|
||||
return this;
|
||||
} else if (u8array[i] > 0xf0) {
|
||||
// Special representation of tiny sets that are close together
|
||||
const lspecial = u8array[i] & 0x0f;
|
||||
this.keysAndCardinalities = new Uint8Array(lspecial * 4);
|
||||
let pspecial = i + 1;
|
||||
let key = u8array[pspecial + 2] | (u8array[pspecial + 3] << 8);
|
||||
let value = u8array[pspecial] | (u8array[pspecial + 1] << 8);
|
||||
let entry = (key << 16) | value;
|
||||
let container;
|
||||
container = new RoaringBitmapArray(1, new Uint8Array(4));
|
||||
container.array[0] = value & 0xFF;
|
||||
container.array[1] = (value >> 8) & 0xFF;
|
||||
this.containers.push(container);
|
||||
this.keysAndCardinalities[0] = key;
|
||||
this.keysAndCardinalities[1] = key >> 8;
|
||||
pspecial += 4;
|
||||
for (let ispecial = 1; ispecial < lspecial; ispecial += 1) {
|
||||
entry += u8array[pspecial] | (u8array[pspecial + 1] << 8);
|
||||
value = entry & 0xffff;
|
||||
key = entry >> 16;
|
||||
container = this.addToArrayAt(key);
|
||||
const cardinalityOld = container.cardinality;
|
||||
container.array[cardinalityOld * 2] = value & 0xFF;
|
||||
container.array[(cardinalityOld * 2) + 1] = (value >> 8) & 0xFF;
|
||||
container.cardinality = cardinalityOld + 1;
|
||||
pspecial += 2;
|
||||
}
|
||||
this.consumed_len_bytes = pspecial - i;
|
||||
return this;
|
||||
} else if (u8array[i] < 0x3a) {
|
||||
// Special representation of tiny sets with arbitrary 32-bit integers
|
||||
const lspecial = u8array[i];
|
||||
this.keysAndCardinalities = new Uint8Array(lspecial * 4);
|
||||
let pspecial = i + 1;
|
||||
for (let ispecial = 0; ispecial < lspecial; ispecial += 1) {
|
||||
const key = u8array[pspecial + 2] | (u8array[pspecial + 3] << 8);
|
||||
const value = u8array[pspecial] | (u8array[pspecial + 1] << 8);
|
||||
const container = this.addToArrayAt(key);
|
||||
const cardinalityOld = container.cardinality;
|
||||
container.array[cardinalityOld * 2] = value & 0xFF;
|
||||
container.array[(cardinalityOld * 2) + 1] = (value >> 8) & 0xFF;
|
||||
container.cardinality = cardinalityOld + 1;
|
||||
pspecial += 4;
|
||||
}
|
||||
this.consumed_len_bytes = pspecial - i;
|
||||
return this;
|
||||
}
|
||||
// https://github.com/RoaringBitmap/RoaringFormatSpec
|
||||
//
|
||||
// Roaring bitmaps are used for flags that can be kept in their
|
||||
// compressed form, even when loaded into memory. This decoder
|
||||
// turns the containers into objects, but uses byte array
|
||||
// slices of the original format for the data payload.
|
||||
const has_runs = u8array[i] === 0x3b;
|
||||
if (u8array[i] !== 0x3a && u8array[i] !== 0x3b) {
|
||||
throw new Error("not a roaring bitmap: " + u8array[i]);
|
||||
}
|
||||
const size = has_runs ?
|
||||
((u8array[i + 2] | (u8array[i + 3] << 8)) + 1) :
|
||||
((u8array[i + 4] | (u8array[i + 5] << 8) |
|
||||
(u8array[i + 6] << 16) | (u8array[i + 7] << 24)));
|
||||
i += has_runs ? 4 : 8;
|
||||
let is_run;
|
||||
if (has_runs) {
|
||||
const is_run_len = (size + 7) >> 3;
|
||||
is_run = new Uint8Array(u8array.buffer, i + u8array.byteOffset, is_run_len);
|
||||
i += is_run_len;
|
||||
} else {
|
||||
is_run = EMPTY_UINT8;
|
||||
}
|
||||
this.keysAndCardinalities = u8array.subarray(i, i + (size * 4));
|
||||
i += size * 4;
|
||||
let offsets = null;
|
||||
if (!has_runs || size >= 4) {
|
||||
offsets = [];
|
||||
for (let j = 0; j < size; ++j) {
|
||||
offsets.push(u8array[i] | (u8array[i + 1] << 8) | (u8array[i + 2] << 16) |
|
||||
(u8array[i + 3] << 24));
|
||||
i += 4;
|
||||
}
|
||||
}
|
||||
for (let j = 0; j < size; ++j) {
|
||||
if (offsets && offsets[j] !== i - start) {
|
||||
throw new Error(`corrupt bitmap ${j}: ${i - start} / ${offsets[j]}`);
|
||||
}
|
||||
const cardinality = (this.keysAndCardinalities[(j * 4) + 2] |
|
||||
(this.keysAndCardinalities[(j * 4) + 3] << 8)) + 1;
|
||||
if (is_run[j >> 3] & (1 << (j & 0x7))) {
|
||||
const runcount = (u8array[i] | (u8array[i + 1] << 8));
|
||||
i += 2;
|
||||
this.containers.push(new RoaringBitmapRun(
|
||||
runcount,
|
||||
new Uint8Array(u8array.buffer, i + u8array.byteOffset, runcount * 4),
|
||||
));
|
||||
i += runcount * 4;
|
||||
} else if (cardinality >= 4096) {
|
||||
this.containers.push(new RoaringBitmapBits(new Uint8Array(
|
||||
u8array.buffer,
|
||||
i + u8array.byteOffset, 8192,
|
||||
)));
|
||||
i += 8192;
|
||||
} else {
|
||||
const end = cardinality * 2;
|
||||
this.containers.push(new RoaringBitmapArray(
|
||||
cardinality,
|
||||
new Uint8Array(u8array.buffer, i + u8array.byteOffset, end),
|
||||
));
|
||||
i += end;
|
||||
}
|
||||
}
|
||||
this.consumed_len_bytes = i - start;
|
||||
}
|
||||
/**
|
||||
* @param {number} number
|
||||
* @returns {RoaringBitmap}
|
||||
*/
|
||||
static makeSingleton(number) {
|
||||
const result = new RoaringBitmap(null, 0);
|
||||
result.keysAndCardinalities = Uint8Array.of(
|
||||
(number >> 16), (number >> 24),
|
||||
0, 0, // keysAndCardinalities stores the true cardinality minus 1
|
||||
);
|
||||
result.containers.push(new RoaringBitmapArray(
|
||||
1,
|
||||
Uint8Array.of(number, number >> 8),
|
||||
));
|
||||
return result;
|
||||
}
|
||||
/** @returns {RoaringBitmap} */
|
||||
static everything() {
|
||||
if (EVERYTHING_BITMAP.isEmpty()) {
|
||||
let i = 0;
|
||||
const l = 1 << 16;
|
||||
const everything_range = new RoaringBitmapRun(1, Uint8Array.of(0, 0, 0xff, 0xff));
|
||||
EVERYTHING_BITMAP.keysAndCardinalities = new Uint8Array(l * 4);
|
||||
while (i < l) {
|
||||
EVERYTHING_BITMAP.containers.push(everything_range);
|
||||
// key
|
||||
EVERYTHING_BITMAP.keysAndCardinalities[(i * 4) + 0] = i;
|
||||
EVERYTHING_BITMAP.keysAndCardinalities[(i * 4) + 1] = i >> 8;
|
||||
// cardinality (minus one)
|
||||
EVERYTHING_BITMAP.keysAndCardinalities[(i * 4) + 2] = 0xff;
|
||||
EVERYTHING_BITMAP.keysAndCardinalities[(i * 4) + 3] = 0xff;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
return EVERYTHING_BITMAP;
|
||||
}
|
||||
/** @returns {RoaringBitmap} */
|
||||
static empty() {
|
||||
return EMPTY_BITMAP;
|
||||
}
|
||||
/** @returns {boolean} */
|
||||
isEmpty() {
|
||||
return this.containers.length === 0;
|
||||
}
|
||||
/**
|
||||
* Helper function used when constructing bitmaps from lists.
|
||||
* Returns an array container with at least two free byte slots
|
||||
* and bumps `this.cardinalities`.
|
||||
* @param {number} key
|
||||
* @returns {RoaringBitmapArray}
|
||||
*/
|
||||
addToArrayAt(key) {
|
||||
let mid = this.getContainerId(key);
|
||||
/** @type {RoaringBitmapArray|RoaringBitmapBits|RoaringBitmapRun} */
|
||||
let container;
|
||||
if (mid === -1) {
|
||||
container = new RoaringBitmapArray(0, new Uint8Array(2));
|
||||
mid = this.containers.length;
|
||||
this.containers.push(container);
|
||||
if (mid * 4 > this.keysAndCardinalities.length) {
|
||||
const keysAndContainers = new Uint8Array(mid * 8);
|
||||
keysAndContainers.set(this.keysAndCardinalities);
|
||||
this.keysAndCardinalities = keysAndContainers;
|
||||
}
|
||||
this.keysAndCardinalities[(mid * 4) + 0] = key;
|
||||
this.keysAndCardinalities[(mid * 4) + 1] = key >> 8;
|
||||
} else {
|
||||
container = this.containers[mid];
|
||||
const cardinalityOld =
|
||||
this.keysAndCardinalities[(mid * 4) + 2] |
|
||||
(this.keysAndCardinalities[(mid * 4) + 3] << 8);
|
||||
const cardinality = cardinalityOld + 1;
|
||||
this.keysAndCardinalities[(mid * 4) + 2] = cardinality;
|
||||
this.keysAndCardinalities[(mid * 4) + 3] = cardinality >> 8;
|
||||
}
|
||||
// the logic for handing this number is annoying, because keysAndCardinalities stores
|
||||
// the cardinality *minus one*, so that it can count up to 65536 with only two bytes
|
||||
// (because empty containers are never stored).
|
||||
//
|
||||
// So, if this is a new container, the stored cardinality contains `0 0`, which is
|
||||
// the proper value of the old cardinality (an imaginary empty container existed).
|
||||
// If this is adding to an existing container, then the above `else` branch bumps it
|
||||
// by one, leaving us with a proper value of `cardinality - 1`.
|
||||
const cardinalityOld =
|
||||
this.keysAndCardinalities[(mid * 4) + 2] |
|
||||
(this.keysAndCardinalities[(mid * 4) + 3] << 8);
|
||||
if (!(container instanceof RoaringBitmapArray) ||
|
||||
container.array.byteLength < ((cardinalityOld + 1) * 2)
|
||||
) {
|
||||
const newBuf = new Uint8Array((cardinalityOld + 1) * 4);
|
||||
let idx = 0;
|
||||
for (const cvalue of container.values()) {
|
||||
newBuf[idx] = cvalue & 0xFF;
|
||||
newBuf[idx + 1] = (cvalue >> 8) & 0xFF;
|
||||
idx += 2;
|
||||
}
|
||||
if (container instanceof RoaringBitmapArray) {
|
||||
container.cardinality = cardinalityOld;
|
||||
container.array = newBuf;
|
||||
return container;
|
||||
}
|
||||
const newcontainer = new RoaringBitmapArray(cardinalityOld, newBuf);
|
||||
this.containers[mid] = newcontainer;
|
||||
return newcontainer;
|
||||
} else {
|
||||
return container;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @param {RoaringBitmap} that
|
||||
* @returns {RoaringBitmap}
|
||||
*/
|
||||
union(that) {
|
||||
if (this.isEmpty()) {
|
||||
return that;
|
||||
}
|
||||
if (that.isEmpty()) {
|
||||
return this;
|
||||
}
|
||||
if (this === RoaringBitmap.everything() || that === RoaringBitmap.everything()) {
|
||||
return RoaringBitmap.everything();
|
||||
}
|
||||
let i = 0;
|
||||
const il = this.containers.length;
|
||||
let j = 0;
|
||||
const jl = that.containers.length;
|
||||
const result = new RoaringBitmap(null, 0);
|
||||
result.keysAndCardinalities = new Uint8Array((il + jl) * 4);
|
||||
while (i < il || j < jl) {
|
||||
const ik = i * 4;
|
||||
const jk = j * 4;
|
||||
const k = result.containers.length * 4;
|
||||
if (j >= jl || (i < il && (
|
||||
(this.keysAndCardinalities[ik + 1] < that.keysAndCardinalities[jk + 1]) ||
|
||||
(this.keysAndCardinalities[ik + 1] === that.keysAndCardinalities[jk + 1] &&
|
||||
this.keysAndCardinalities[ik] < that.keysAndCardinalities[jk])
|
||||
))) {
|
||||
result.keysAndCardinalities[k + 0] = this.keysAndCardinalities[ik + 0];
|
||||
result.keysAndCardinalities[k + 1] = this.keysAndCardinalities[ik + 1];
|
||||
result.keysAndCardinalities[k + 2] = this.keysAndCardinalities[ik + 2];
|
||||
result.keysAndCardinalities[k + 3] = this.keysAndCardinalities[ik + 3];
|
||||
result.containers.push(this.containers[i]);
|
||||
i += 1;
|
||||
} else if (i >= il || (j < jl && (
|
||||
(that.keysAndCardinalities[jk + 1] < this.keysAndCardinalities[ik + 1]) ||
|
||||
(that.keysAndCardinalities[jk + 1] === this.keysAndCardinalities[ik + 1] &&
|
||||
that.keysAndCardinalities[jk] < this.keysAndCardinalities[ik])
|
||||
))) {
|
||||
result.keysAndCardinalities[k + 0] = that.keysAndCardinalities[jk + 0];
|
||||
result.keysAndCardinalities[k + 1] = that.keysAndCardinalities[jk + 1];
|
||||
result.keysAndCardinalities[k + 2] = that.keysAndCardinalities[jk + 2];
|
||||
result.keysAndCardinalities[k + 3] = that.keysAndCardinalities[jk + 3];
|
||||
result.containers.push(that.containers[j]);
|
||||
j += 1;
|
||||
} else {
|
||||
// this key is not smaller than that key
|
||||
// that key is not smaller than this key
|
||||
// they must be equal
|
||||
const thisContainer = this.containers[i];
|
||||
const thatContainer = that.containers[j];
|
||||
let card = 0;
|
||||
if (thisContainer instanceof RoaringBitmapBits &&
|
||||
thatContainer instanceof RoaringBitmapBits
|
||||
) {
|
||||
const resultArray = new Uint8Array(
|
||||
thisContainer.array.length > thatContainer.array.length ?
|
||||
thisContainer.array.length :
|
||||
thatContainer.array.length,
|
||||
);
|
||||
let k = 0;
|
||||
const kl = resultArray.length;
|
||||
while (k < kl) {
|
||||
const c = thisContainer.array[k] | thatContainer.array[k];
|
||||
resultArray[k] = c;
|
||||
card += bitCount(c);
|
||||
k += 1;
|
||||
}
|
||||
result.containers.push(new RoaringBitmapBits(resultArray));
|
||||
} else {
|
||||
const thisValues = thisContainer.values();
|
||||
const thatValues = thatContainer.values();
|
||||
let thisResult = thisValues.next();
|
||||
let thatResult = thatValues.next();
|
||||
/** @type {Array<number>} */
|
||||
const resultValues = [];
|
||||
while (!thatResult.done || !thisResult.done) {
|
||||
// generator will definitely implement the iterator protocol correctly
|
||||
/** @type {number} */
|
||||
const thisValue = thisResult.value;
|
||||
/** @type {number} */
|
||||
const thatValue = thatResult.value;
|
||||
if (thatResult.done || thisValue < thatValue) {
|
||||
resultValues.push(thisValue);
|
||||
thisResult = thisValues.next();
|
||||
} else if (thisResult.done || thatValue < thisValue) {
|
||||
resultValues.push(thatValue);
|
||||
thatResult = thatValues.next();
|
||||
} else {
|
||||
// this value is not smaller than that value
|
||||
// that value is not smaller than this value
|
||||
// they must be equal
|
||||
resultValues.push(thisValue);
|
||||
thisResult = thisValues.next();
|
||||
thatResult = thatValues.next();
|
||||
}
|
||||
}
|
||||
const resultArray = new Uint8Array(resultValues.length * 2);
|
||||
let k = 0;
|
||||
for (const value of resultValues) {
|
||||
// roaring bitmap is little endian
|
||||
resultArray[k] = value & 0xFF;
|
||||
resultArray[k + 1] = (value >> 8) & 0xFF;
|
||||
k += 2;
|
||||
}
|
||||
result.containers.push(new RoaringBitmapArray(
|
||||
resultValues.length,
|
||||
resultArray,
|
||||
));
|
||||
card = resultValues.length;
|
||||
}
|
||||
result.keysAndCardinalities[k + 0] = this.keysAndCardinalities[ik + 0];
|
||||
result.keysAndCardinalities[k + 1] = this.keysAndCardinalities[ik + 1];
|
||||
card -= 1;
|
||||
result.keysAndCardinalities[k + 2] = card;
|
||||
result.keysAndCardinalities[k + 3] = card >> 8;
|
||||
i += 1;
|
||||
j += 1;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
/**
|
||||
* @param {RoaringBitmap} that
|
||||
* @returns {RoaringBitmap}
|
||||
*/
|
||||
intersection(that) {
|
||||
if (this.isEmpty() || that.isEmpty()) {
|
||||
return EMPTY_BITMAP;
|
||||
}
|
||||
if (this === RoaringBitmap.everything()) {
|
||||
return that;
|
||||
}
|
||||
if (that === RoaringBitmap.everything()) {
|
||||
return this;
|
||||
}
|
||||
let i = 0;
|
||||
const il = this.containers.length;
|
||||
let j = 0;
|
||||
const jl = that.containers.length;
|
||||
const result = new RoaringBitmap(null, 0);
|
||||
result.keysAndCardinalities = new Uint8Array((il > jl ? il : jl) * 4);
|
||||
while (i < il && j < jl) {
|
||||
const ik = i * 4;
|
||||
const jk = j * 4;
|
||||
const k = result.containers.length * 4;
|
||||
if (j >= jl || (i < il && (
|
||||
(this.keysAndCardinalities[ik + 1] < that.keysAndCardinalities[jk + 1]) ||
|
||||
(this.keysAndCardinalities[ik + 1] === that.keysAndCardinalities[jk + 1] &&
|
||||
this.keysAndCardinalities[ik] < that.keysAndCardinalities[jk])
|
||||
))) {
|
||||
i += 1;
|
||||
} else if (i >= il || (j < jl && (
|
||||
(that.keysAndCardinalities[jk + 1] < this.keysAndCardinalities[ik + 1]) ||
|
||||
(that.keysAndCardinalities[jk + 1] === this.keysAndCardinalities[ik + 1] &&
|
||||
that.keysAndCardinalities[jk] < this.keysAndCardinalities[ik])
|
||||
))) {
|
||||
j += 1;
|
||||
} else {
|
||||
// this key is not smaller than that key
|
||||
// that key is not smaller than this key
|
||||
// they must be equal
|
||||
const thisContainer = this.containers[i];
|
||||
const thatContainer = that.containers[j];
|
||||
let card = 0;
|
||||
if (thisContainer instanceof RoaringBitmapBits &&
|
||||
thatContainer instanceof RoaringBitmapBits
|
||||
) {
|
||||
const resultArray = new Uint8Array(
|
||||
thisContainer.array.length > thatContainer.array.length ?
|
||||
thisContainer.array.length :
|
||||
thatContainer.array.length,
|
||||
);
|
||||
let k = 0;
|
||||
const kl = resultArray.length;
|
||||
while (k < kl) {
|
||||
const c = thisContainer.array[k] & thatContainer.array[k];
|
||||
resultArray[k] = c;
|
||||
card += bitCount(c);
|
||||
k += 1;
|
||||
}
|
||||
if (card !== 0) {
|
||||
result.containers.push(new RoaringBitmapBits(resultArray));
|
||||
}
|
||||
} else {
|
||||
const thisValues = thisContainer.values();
|
||||
const thatValues = thatContainer.values();
|
||||
let thisValue = thisValues.next();
|
||||
let thatValue = thatValues.next();
|
||||
const resultValues = [];
|
||||
while (!thatValue.done && !thisValue.done) {
|
||||
if (thisValue.value < thatValue.value) {
|
||||
thisValue = thisValues.next();
|
||||
} else if (thatValue.value < thisValue.value) {
|
||||
thatValue = thatValues.next();
|
||||
} else {
|
||||
// this value is not smaller than that value
|
||||
// that value is not smaller than this value
|
||||
// they must be equal
|
||||
resultValues.push(thisValue.value);
|
||||
thisValue = thisValues.next();
|
||||
thatValue = thatValues.next();
|
||||
}
|
||||
}
|
||||
card = resultValues.length;
|
||||
if (card !== 0) {
|
||||
const resultArray = new Uint8Array(resultValues.length * 2);
|
||||
let k = 0;
|
||||
for (const value of resultValues) {
|
||||
// roaring bitmap is little endian
|
||||
resultArray[k] = value & 0xFF;
|
||||
resultArray[k + 1] = (value >> 8) & 0xFF;
|
||||
k += 2;
|
||||
}
|
||||
result.containers.push(new RoaringBitmapArray(
|
||||
resultValues.length,
|
||||
resultArray,
|
||||
));
|
||||
}
|
||||
}
|
||||
if (card !== 0) {
|
||||
result.keysAndCardinalities[k + 0] = this.keysAndCardinalities[ik + 0];
|
||||
result.keysAndCardinalities[k + 1] = this.keysAndCardinalities[ik + 1];
|
||||
card -= 1;
|
||||
result.keysAndCardinalities[k + 2] = card;
|
||||
result.keysAndCardinalities[k + 3] = card >> 8;
|
||||
}
|
||||
i += 1;
|
||||
j += 1;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
/** @param {number} keyvalue */
|
||||
contains(keyvalue) {
|
||||
const key = keyvalue >> 16;
|
||||
const value = keyvalue & 0xFFFF;
|
||||
const mid = this.getContainerId(key);
|
||||
return mid === -1 ? false : this.containers[mid].contains(value);
|
||||
}
|
||||
/**
|
||||
* @param {number} key
|
||||
* @returns {number}
|
||||
*/
|
||||
getContainerId(key) {
|
||||
// Binary search algorithm copied from
|
||||
// https://en.wikipedia.org/wiki/Binary_search#Procedure
|
||||
//
|
||||
// Format is required by specification to be sorted.
|
||||
// Because keys are 16 bits and unique, length can't be
|
||||
// bigger than 2**16, and because we have 32 bits of safe int,
|
||||
// left + right can't overflow.
|
||||
let left = 0;
|
||||
let right = this.containers.length - 1;
|
||||
while (left <= right) {
|
||||
const mid = Math.floor((left + right) / 2);
|
||||
const x = this.keysAndCardinalities[(mid * 4)] |
|
||||
(this.keysAndCardinalities[(mid * 4) + 1] << 8);
|
||||
if (x < key) {
|
||||
left = mid + 1;
|
||||
} else if (x > key) {
|
||||
right = mid - 1;
|
||||
} else {
|
||||
return mid;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
* entries() {
|
||||
const l = this.containers.length;
|
||||
for (let i = 0; i < l; ++i) {
|
||||
const key = this.keysAndCardinalities[i * 4] |
|
||||
(this.keysAndCardinalities[(i * 4) + 1] << 8);
|
||||
for (const value of this.containers[i].values()) {
|
||||
yield (key << 16) | value;
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @returns {number|null}
|
||||
*/
|
||||
first() {
|
||||
for (const entry of this.entries()) {
|
||||
return entry;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* @returns {number}
|
||||
*/
|
||||
cardinality() {
|
||||
let result = 0;
|
||||
const l = this.containers.length;
|
||||
for (let i = 0; i < l; ++i) {
|
||||
const card = this.keysAndCardinalities[(i * 4) + 2] |
|
||||
(this.keysAndCardinalities[(i * 4) + 3] << 8);
|
||||
result += card + 1;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * A run-length-encoded roaring bitmap container.
 *
 * The backing array holds `runcount` four-byte records, sorted by
 * start: a little-endian 16-bit start value followed by a
 * little-endian 16-bit (run length - 1).
 */
class RoaringBitmapRun {
    /**
     * @param {number} runcount
     * @param {Uint8Array} array
     */
    constructor(runcount, array) {
        this.runcount = runcount;
        this.array = array;
    }
    /** @param {number} value */
    contains(value) {
        // Binary search over the sorted runs, per
        // https://en.wikipedia.org/wiki/Binary_search#Procedure
        // runcount is stored in 16 bits, so lo + hi cannot overflow.
        let lo = 0;
        let hi = this.runcount - 1;
        while (lo <= hi) {
            const mid = (lo + hi) >> 1;
            const base = mid * 4;
            const runStart = this.array[base] | (this.array[base + 1] << 8);
            const runLenM1 = this.array[base + 2] | (this.array[base + 3] << 8);
            if (runStart > value) {
                hi = mid - 1;
            } else if (runStart + runLenM1 < value) {
                lo = mid + 1;
            } else {
                // runStart <= value <= runStart + runLenM1
                return true;
            }
        }
        return false;
    }
    /** Yield every value covered by every run, in ascending order. */
    * values() {
        for (let run = 0; run < this.runcount; run += 1) {
            const base = run * 4;
            const start = this.array[base] | (this.array[base + 1] << 8);
            const lenM1 = this.array[base + 2] | (this.array[base + 3] << 8);
            const end = start + lenM1;
            for (let v = start; v <= end; v += 1) {
                yield v;
            }
        }
    }
}
|
||||
/**
 * A sorted-array roaring bitmap container: `cardinality` values, each
 * stored as two little-endian bytes, in ascending order.
 */
class RoaringBitmapArray {
    /**
     * @param {number} cardinality
     * @param {Uint8Array} array
     */
    constructor(cardinality, array) {
        this.cardinality = cardinality;
        this.array = array;
    }
    /** @param {number} value */
    contains(value) {
        // Binary search, per
        // https://en.wikipedia.org/wiki/Binary_search#Procedure
        // Array containers hold at most 4096 entries, so lo + hi
        // cannot overflow.
        let lo = 0;
        let hi = this.cardinality - 1;
        while (lo <= hi) {
            const mid = (lo + hi) >> 1;
            const base = mid * 2;
            const entry = this.array[base] | (this.array[base + 1] << 8);
            if (entry === value) {
                return true;
            }
            if (entry < value) {
                lo = mid + 1;
            } else {
                hi = mid - 1;
            }
        }
        return false;
    }
    /** @returns {Generator<number>} */
    * values() {
        const byteLen = this.cardinality * 2;
        for (let off = 0; off < byteLen; off += 2) {
            yield this.array[off] | (this.array[off + 1] << 8);
        }
    }
}
|
||||
/**
 * A dense bitset roaring bitmap container: bit `i` of the backing
 * array (little-endian bit order within each byte) marks the
 * presence of value `i`.
 */
class RoaringBitmapBits {
    /**
     * @param {Uint8Array} array
     */
    constructor(array) {
        this.array = array;
    }
    /** @param {number} value */
    contains(value) {
        const byte = this.array[value >> 3];
        const mask = 1 << (value & 7);
        return (byte & mask) !== 0;
    }
    /** Yield each set bit's index in ascending order. */
    * values() {
        const bitLen = this.array.length << 3;
        for (let bit = 0; bit < bitLen; bit += 1) {
            if (this.contains(bit)) {
                yield bit;
            }
        }
    }
}
|
||||
|
||||
// Shared empty bitmap whose serialized form occupied zero bytes.
const EMPTY_BITMAP = new RoaringBitmap(null, 0);
EMPTY_BITMAP.consumed_len_bytes = 0;
// Shared empty bitmap that consumed one byte of its source buffer
// (used where an explicit zero-length marker byte was read).
const EMPTY_BITMAP1 = new RoaringBitmap(null, 0);
EMPTY_BITMAP1.consumed_len_bytes = 1;
// Sentinel meaning "matches everything". NOTE(review): constructed the
// same way as the empty bitmaps; presumably consumers compare against
// this object by identity rather than iterating it — confirm at call sites.
const EVERYTHING_BITMAP = new RoaringBitmap(null, 0);
|
||||
|
||||
/**
 * A mapping from six byte nodeids to an arbitrary value.
 * We don't just use `Map` because that requires double hashing.
 *
 * Open addressing with linear probing and Robin Hood displacement.
 * Keys are assumed to already be hash output, so the low four bytes
 * of a key are used directly as its preferred slot.
 * @template T
 * @property {Uint8Array} keys
 * @property {T[]} values
 * @property {number} size
 * @property {number} capacityClass
 */
class HashTable {
    /**
     * Construct an empty hash table.
     */
    constructor() {
        // Flat key storage: slot i owns bytes [i*6, i*6 + 6).
        this.keys = EMPTY_UINT8;
        /** @type {(T|undefined)[]} */
        this.values = [];
        // Number of occupied slots.
        this.size = 0;
        // Capacity is always 1 << capacityClass slots.
        this.capacityClass = 0;
    }
    /**
     * Yield every occupied [key, value] pair. The yielded key is a
     * live subarray view into internal storage, not a copy.
     * @returns {Generator<[Uint8Array, T]>}
     */
    * entries() {
        const keys = this.keys;
        const values = this.values;
        const l = this.values.length;
        for (let i = 0; i < l; i += 1) {
            const value = values[i];
            // an undefined value marks an empty slot
            if (value !== undefined) {
                yield [keys.subarray(i * 6, (i + 1) * 6), value];
            }
        }
    }
    /**
     * Add a value to the hash table.
     * @param {Uint8Array} key
     * @param {T} value
     */
    set(key, value) {
        // 90 % load factor
        if (this.size * 10 >= this.values.length * 9) {
            // Double the capacity, then re-insert every existing entry.
            const keys = this.keys;
            const values = this.values;
            const l = values.length;
            this.capacityClass += 1;
            const capacity = 1 << this.capacityClass;
            this.keys = new Uint8Array(capacity * 6);
            this.values = [];
            for (let i = 0; i < capacity; i += 1) {
                this.values.push(undefined);
            }
            this.size = 0;
            for (let i = 0; i < l; i += 1) {
                const oldValue = values[i];
                if (oldValue !== undefined) {
                    // Old keys live in one flat buffer; pass the byte
                    // offset instead of slicing out a temporary key.
                    this.setNoGrow(keys, i * 6, oldValue);
                }
            }
        }
        this.setNoGrow(key, 0, value);
    }
    /**
     * Insert without checking the load factor; `set` guarantees a free
     * slot exists. The six-byte key begins at `start` within `key`.
     * @param {Uint8Array} key
     * @param {number} start
     * @param {T} value
     */
    setNoGrow(key, start, value) {
        const mask = ~(0xffffffff << this.capacityClass);
        const keys = this.keys;
        const values = this.values;
        const l = 1 << this.capacityClass;
        // because we know that our values are already hashed,
        // just chop off the lower four bytes
        let slot = (
            (key[start + 2] << 24) |
            (key[start + 3] << 16) |
            (key[start + 4] << 8) |
            key[start + 5]
        ) & mask;
        // `distance` tracks how far the key currently being placed is
        // from its preferred slot; note it is advanced at the bottom of
        // the loop body, not in the for-header.
        for (let distance = 0; distance < l; ) {
            const j = slot * 6;
            const otherValue = values[slot];
            if (otherValue === undefined) {
                // Free slot: claim it and record the key bytes.
                values[slot] = value;
                const keysStart = slot * 6;
                keys[keysStart + 0] = key[start + 0];
                keys[keysStart + 1] = key[start + 1];
                keys[keysStart + 2] = key[start + 2];
                keys[keysStart + 3] = key[start + 3];
                keys[keysStart + 4] = key[start + 4];
                keys[keysStart + 5] = key[start + 5];
                this.size += 1;
                break;
            } else if (
                key[start + 0] === keys[j + 0] &&
                key[start + 1] === keys[j + 1] &&
                key[start + 2] === keys[j + 2] &&
                key[start + 3] === keys[j + 3] &&
                key[start + 4] === keys[j + 4] &&
                key[start + 5] === keys[j + 5]
            ) {
                // Key already present: overwrite in place (size unchanged).
                values[slot] = value;
                break;
            } else {
                const otherPreferredSlot = (
                    (keys[j + 2] << 24) | (keys[j + 3] << 16) |
                    (keys[j + 4] << 8) | keys[j + 5]
                ) & mask;
                // Probe distance of the resident entry, accounting for
                // wrap-around at the end of the table.
                const otherDistance = otherPreferredSlot <= slot ?
                    slot - otherPreferredSlot :
                    (l - otherPreferredSlot) + slot;
                if (distance > otherDistance) {
                    // if the other key is closer to its preferred slot than this one,
                    // then insert our node in its place and swap
                    //
                    // https://cglab.ca/~abeinges/blah/robinhood-part-1/
                    const otherKey = keys.slice(j, j + 6);
                    values[slot] = value;
                    value = otherValue;
                    keys[j + 0] = key[start + 0];
                    keys[j + 1] = key[start + 1];
                    keys[j + 2] = key[start + 2];
                    keys[j + 3] = key[start + 3];
                    keys[j + 4] = key[start + 4];
                    keys[j + 5] = key[start + 5];
                    // continue probing on behalf of the evicted entry
                    key = otherKey;
                    start = 0;
                    distance = otherDistance;
                }
                distance += 1;
                slot = (slot + 1) & mask;
            }
        }
    }
    /**
     * Retrieve a value
     * @param {Uint8Array} key
     * @returns {T|undefined}
     */
    get(key) {
        if (key.length !== 6) {
            throw "invalid key";
        }
        return this.getWithOffsetKey(key, 0);
    }
    /**
     * Retrieve a value whose six-byte key begins at `start` within `key`.
     * @param {Uint8Array} key
     * @param {number} start
     * @returns {T|undefined}
     */
    getWithOffsetKey(key, start) {
        const mask = ~(0xffffffff << this.capacityClass);
        const keys = this.keys;
        const values = this.values;
        const l = 1 << this.capacityClass;
        // because we know that our values are already hashed,
        // just chop off the lower four bytes
        let slot = (
            (key[start + 2] << 24) |
            (key[start + 3] << 16) |
            (key[start + 4] << 8) |
            key[start + 5]
        ) & mask;
        for (let distance = 0; distance < l; distance += 1) {
            const j = slot * 6;
            const value = values[slot];
            if (value === undefined) {
                // An empty slot on the probe path means the key is absent.
                break;
            } else if (
                key[start + 0] === keys[j + 0] &&
                key[start + 1] === keys[j + 1] &&
                key[start + 2] === keys[j + 2] &&
                key[start + 3] === keys[j + 3] &&
                key[start + 4] === keys[j + 4] &&
                key[start + 5] === keys[j + 5]
            ) {
                return value;
            } else {
                const otherPreferredSlot = (
                    (keys[j + 2] << 24) | (keys[j + 3] << 16) |
                    (keys[j + 4] << 8) | keys[j + 5]
                ) & mask;
                const otherDistance = otherPreferredSlot <= slot ?
                    slot - otherPreferredSlot :
                    (l - otherPreferredSlot) + slot;
                if (distance > otherDistance) {
                    // Robin Hood invariant: once we meet an entry that is
                    // closer to its home than our probe distance, the key
                    // we're looking for cannot appear further along.
                    break;
                }
            }
            slot = (slot + 1) & mask;
        }
        return undefined;
    }
}
|
||||
|
||||
/*eslint-disable */
// ignore-tidy-linelength
/** Branch-free 32-bit population count (SWAR).
 * <https://stackoverflow.com/questions/43122082/efficiently-count-the-number-of-bits-in-an-integer-in-javascript>
 * @param {number} n coerced to a 32-bit integer before counting
 * @returns {number} number of set bits in the 32-bit representation of n
 */
function bitCount(n) {
    n = (~~n) - ((n >> 1) & 0x55555555);
    n = (n & 0x33333333) + ((n >> 2) & 0x33333333);
    return ((n + (n >> 4) & 0xF0F0F0F) * 0x1010101) >> 24;
}
/*eslint-enable */
|
||||
|
||||
/**
|
||||
* @param {stringdex.Hooks} hooks
|
||||
* @returns {Promise<stringdex.Database>}
|
||||
*/
|
||||
function loadDatabase(hooks) {
|
||||
/** @type {stringdex.Callbacks} */
|
||||
const callbacks = {
|
||||
rr_: function(data) {
|
||||
const dataObj = JSON.parse(data);
|
||||
for (const colName of Object.keys(dataObj)) {
|
||||
if (Object.hasOwn(dataObj[colName], "I")) {
|
||||
registry.searchTreeRoots.set(
|
||||
colName,
|
||||
makeSearchTreeFromBase64(dataObj[colName].I)[1],
|
||||
);
|
||||
}
|
||||
if (Object.hasOwn(dataObj[colName], "N")) {
|
||||
const counts = [];
|
||||
const countsstring = dataObj[colName]["N"];
|
||||
let i = 0;
|
||||
const l = countsstring.length;
|
||||
while (i < l) {
|
||||
let n = 0;
|
||||
let c = countsstring.charCodeAt(i);
|
||||
while (c < 96) { // 96 = "`"
|
||||
n = (n << 4) | (c & 0xF);
|
||||
i += 1;
|
||||
c = countsstring.charCodeAt(i);
|
||||
}
|
||||
n = (n << 4) | (c & 0xF);
|
||||
counts.push(n);
|
||||
i += 1;
|
||||
}
|
||||
registry.dataColumns.set(colName, new DataColumn(
|
||||
counts,
|
||||
makeUint8ArrayFromBase64(dataObj[colName]["H"]),
|
||||
new RoaringBitmap(makeUint8ArrayFromBase64(dataObj[colName]["E"]), 0),
|
||||
colName,
|
||||
));
|
||||
}
|
||||
}
|
||||
const cb = registry.searchTreeRootCallback;
|
||||
if (cb) {
|
||||
cb(null, new Database(registry.searchTreeRoots, registry.dataColumns));
|
||||
}
|
||||
},
|
||||
err_rr_: function(err) {
|
||||
const cb = registry.searchTreeRootCallback;
|
||||
if (cb) {
|
||||
cb(err, null);
|
||||
}
|
||||
},
|
||||
rd_: function(dataString) {
|
||||
const l = dataString.length;
|
||||
const data = new Uint8Array(l);
|
||||
for (let i = 0; i < l; ++i) {
|
||||
data[i] = dataString.charCodeAt(i);
|
||||
}
|
||||
loadColumnFromBytes(data);
|
||||
},
|
||||
err_rd_: function(filename, err) {
|
||||
const nodeid = makeUint8ArrayFromHex(filename);
|
||||
const cb = registry.dataColumnLoadPromiseCallbacks.get(nodeid);
|
||||
if (cb) {
|
||||
cb(err, null);
|
||||
}
|
||||
},
|
||||
rb_: function(dataString64) {
|
||||
loadColumnFromBytes(makeUint8ArrayFromBase64(dataString64));
|
||||
},
|
||||
err_rb_: function(filename, err) {
|
||||
const nodeid = makeUint8ArrayFromHex(filename);
|
||||
const cb = registry.dataColumnLoadPromiseCallbacks.get(nodeid);
|
||||
if (cb) {
|
||||
cb(err, null);
|
||||
}
|
||||
},
|
||||
rn_: function(inputBase64) {
|
||||
const [nodeid, tree] = makeSearchTreeFromBase64(inputBase64);
|
||||
const cb = registry.searchTreeLoadPromiseCallbacks.get(nodeid);
|
||||
if (cb) {
|
||||
cb(null, tree);
|
||||
registry.searchTreeLoadPromiseCallbacks.set(nodeid, null);
|
||||
}
|
||||
},
|
||||
err_rn_: function(filename, err) {
|
||||
const nodeid = makeUint8ArrayFromHex(filename);
|
||||
const cb = registry.searchTreeLoadPromiseCallbacks.get(nodeid);
|
||||
if (cb) {
|
||||
cb(err, null);
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
* @type {{
|
||||
* searchTreeRoots: Map<string, SearchTree>;
|
||||
* searchTreeLoadPromiseCallbacks: HashTable<(function(any, SearchTree?): any)|null>;
|
||||
* searchTreePromises: HashTable<Promise<SearchTree>>;
|
||||
* dataColumnLoadPromiseCallbacks: HashTable<function(any, Uint8Array[]?): any>;
|
||||
* dataColumns: Map<string, DataColumn>;
|
||||
* dataColumnsBuckets: Map<string, HashTable<Promise<Uint8Array[]>>>;
|
||||
* searchTreeLoadByNodeID: function(Uint8Array): Promise<SearchTree>;
|
||||
* searchTreeRootCallback?: function(any, Database?): any;
|
||||
* dataLoadByNameAndHash: function(string, Uint8Array): Promise<Uint8Array[]>;
|
||||
* }}
|
||||
*/
|
||||
const registry = {
|
||||
searchTreeRoots: new Map(),
|
||||
searchTreeLoadPromiseCallbacks: new HashTable(),
|
||||
searchTreePromises: new HashTable(),
|
||||
dataColumnLoadPromiseCallbacks: new HashTable(),
|
||||
dataColumns: new Map(),
|
||||
dataColumnsBuckets: new Map(),
|
||||
searchTreeLoadByNodeID: function(nodeid) {
|
||||
const existingPromise = registry.searchTreePromises.get(nodeid);
|
||||
if (existingPromise) {
|
||||
return existingPromise;
|
||||
}
|
||||
/** @type {Promise<SearchTree>} */
|
||||
let newPromise;
|
||||
if ((nodeid[0] & 0x80) !== 0) {
|
||||
const isWhole = (nodeid[0] & 0x40) !== 0;
|
||||
let leaves;
|
||||
if ((nodeid[0] & 0x10) !== 0) {
|
||||
let id1 = (nodeid[2] << 8) | nodeid[3];
|
||||
if ((nodeid[0] & 0x20) !== 0) {
|
||||
// when data is present, id1 can be up to 20 bits
|
||||
id1 |= ((nodeid[1] & 0x0f) << 16);
|
||||
} else {
|
||||
// otherwise, we fit in 28
|
||||
id1 |= ((nodeid[0] & 0x0f) << 24) | (nodeid[1] << 16);
|
||||
}
|
||||
const id2 = id1 + ((nodeid[4] << 8) | nodeid[5]);
|
||||
leaves = RoaringBitmap.makeSingleton(id1)
|
||||
.union(RoaringBitmap.makeSingleton(id2));
|
||||
} else {
|
||||
leaves = RoaringBitmap.makeSingleton(
|
||||
(nodeid[2] << 24) | (nodeid[3] << 16) |
|
||||
(nodeid[4] << 8) | nodeid[5],
|
||||
);
|
||||
}
|
||||
const data = (nodeid[0] & 0x20) !== 0 ?
|
||||
Uint8Array.of(((nodeid[0] & 0x0f) << 4) | (nodeid[1] >> 4)) :
|
||||
EMPTY_UINT8;
|
||||
newPromise = Promise.resolve(new SearchTree(
|
||||
EMPTY_SEARCH_TREE_BRANCHES,
|
||||
EMPTY_SEARCH_TREE_BRANCHES,
|
||||
data,
|
||||
isWhole ? leaves : EMPTY_BITMAP,
|
||||
isWhole ? EMPTY_BITMAP : leaves,
|
||||
));
|
||||
} else {
|
||||
const hashHex = makeHexFromUint8Array(nodeid);
|
||||
newPromise = new Promise((resolve, reject) => {
|
||||
const cb = registry.searchTreeLoadPromiseCallbacks.get(nodeid);
|
||||
if (cb) {
|
||||
registry.searchTreeLoadPromiseCallbacks.set(nodeid, (err, data) => {
|
||||
cb(err, data);
|
||||
if (data) {
|
||||
resolve(data);
|
||||
} else {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
registry.searchTreeLoadPromiseCallbacks.set(nodeid, (err, data) => {
|
||||
if (data) {
|
||||
resolve(data);
|
||||
} else {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
hooks.loadTreeByHash(hashHex);
|
||||
}
|
||||
});
|
||||
}
|
||||
registry.searchTreePromises.set(nodeid, newPromise);
|
||||
return newPromise;
|
||||
},
|
||||
dataLoadByNameAndHash: function(name, hash) {
|
||||
let dataColumnBuckets = registry.dataColumnsBuckets.get(name);
|
||||
if (dataColumnBuckets === undefined) {
|
||||
dataColumnBuckets = new HashTable();
|
||||
registry.dataColumnsBuckets.set(name, dataColumnBuckets);
|
||||
}
|
||||
const existingBucket = dataColumnBuckets.get(hash);
|
||||
if (existingBucket) {
|
||||
return existingBucket;
|
||||
}
|
||||
const hashHex = makeHexFromUint8Array(hash);
|
||||
/** @type {Promise<Uint8Array[]>} */
|
||||
const newBucket = new Promise((resolve, reject) => {
|
||||
const cb = registry.dataColumnLoadPromiseCallbacks.get(hash);
|
||||
if (cb) {
|
||||
registry.dataColumnLoadPromiseCallbacks.set(hash, (err, data) => {
|
||||
cb(err, data);
|
||||
if (data) {
|
||||
resolve(data);
|
||||
} else {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
registry.dataColumnLoadPromiseCallbacks.set(hash, (err, data) => {
|
||||
if (data) {
|
||||
resolve(data);
|
||||
} else {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
hooks.loadDataByNameAndHash(name, hashHex);
|
||||
}
|
||||
});
|
||||
dataColumnBuckets.set(hash, newBucket);
|
||||
return newBucket;
|
||||
},
|
||||
};
|
||||
|
||||
/**
 * The set of child subtrees.
 *
 * Abstract base: `entries`, `getIndex`, `getKey`, and `getKeys` are
 * stubs that every concrete branch representation overrides.
 * @type {{
 *     nodeids: Uint8Array,
 *     subtrees: Array<Promise<SearchTree>|null>,
 * }}
 */
class SearchTreeBranches {
    /**
     * Construct the subtree list with `length` nulls
     * @param {number} length
     * @param {Uint8Array} nodeids
     */
    constructor(length, nodeids) {
        this.nodeids = nodeids;
        // Lazily-populated child trees; null means "not loaded yet".
        this.subtrees = new Array(length).fill(null);
    }
    /**
     * A view (not a copy) of the six-byte node id for branch `i`.
     * @param {number} i
     * @returns {Uint8Array}
     */
    getNodeID(i) {
        const begin = this.nodeids.byteOffset + (i * 6);
        return new Uint8Array(this.nodeids.buffer, begin, 6);
    }
    // https://github.com/microsoft/TypeScript/issues/17227
    /** @returns {Generator<[number, Promise<SearchTree>|null]>} */
    entries() {
        throw new Error();
    }
    /**
     * @param {number} _k
     * @returns {number}
     */
    getIndex(_k) {
        throw new Error();
    }
    /**
     * @param {number} _i
     * @returns {number}
     */
    getKey(_i) {
        throw new Error();
    }
    /**
     * @returns {Uint8Array}
     */
    getKeys() {
        throw new Error();
    }
}
|
||||
|
||||
/**
 * A sorted array of search tree branches.
 *
 * `keys[i]` is the byte labelling branch `i`. Keys must be strictly
 * ascending so `getIndex` can binary-search them.
 *
 * @type {{
 *     keys: Uint8Array,
 *     nodeids: Uint8Array,
 *     subtrees: Array<Promise<SearchTree>|null>,
 * }}
 */
class SearchTreeBranchesArray extends SearchTreeBranches {
    /**
     * @param {Uint8Array} keys
     * @param {Uint8Array} nodeids
     */
    constructor(keys, nodeids) {
        super(keys.length, nodeids);
        this.keys = keys;
        // Validate the strictly-ascending invariant that getIndex's
        // binary search relies on. (Previously threw the placeholder
        // message "HERE", which gave no hint about what went wrong.)
        let i = 1;
        while (i < this.keys.length) {
            if (this.keys[i - 1] >= this.keys[i]) {
                throw new Error("search tree branch keys must be sorted and unique");
            }
            i += 1;
        }
    }
    /** @returns {Generator<[number, Promise<SearchTree>|null]>} */
    * entries() {
        let i = 0;
        const l = this.keys.length;
        while (i < l) {
            yield [this.keys[i], this.subtrees[i]];
            i += 1;
        }
    }
    /**
     * Binary-search for key `k`; returns its branch index, or -1.
     * @param {number} k
     * @returns {number}
     */
    getIndex(k) {
        // Since length can't be bigger than 256,
        // left + right can't overflow.
        let left = 0;
        let right = this.keys.length - 1;
        while (left <= right) {
            const mid = (left + right) >> 1;
            if (this.keys[mid] < k) {
                left = mid + 1;
            } else if (this.keys[mid] > k) {
                right = mid - 1;
            } else {
                return mid;
            }
        }
        return -1;
    }
    /**
     * Key byte for branch `i` (inverse of `getIndex`).
     * @param {number} i
     * @returns {number}
     */
    getKey(i) {
        return this.keys[i];
    }
    /**
     * @returns {Uint8Array}
     */
    getKeys() {
        return this.keys;
    }
}
|
||||
|
||||
// Shared branch set for leaf nodes: no keys, no node ids.
const EMPTY_SEARCH_TREE_BRANCHES = new SearchTreeBranchesArray(
    EMPTY_UINT8,
    EMPTY_UINT8,
);

// Alphabet for the 3-byte (24-bit) bitmap branch encoding:
// lowercase ASCII letters, minus two, so the set fits in 24 bits.
/** @type {number[]} */
const SHORT_ALPHABITMAP_CHARS = [];
for (let i = 0x61; i <= 0x7A; ++i) {
    if (i === 0x76 || i === 0x71) {
        // 24 entries, 26 letters, so we skip q and v
        continue;
    }
    SHORT_ALPHABITMAP_CHARS.push(i);
}

// Alphabet for the 4-byte (32-bit) bitmap branch encoding:
// the digits '1'..'6' plus all 26 lowercase letters = 32 entries.
/** @type {number[]} */
const LONG_ALPHABITMAP_CHARS = [0x31, 0x32, 0x33, 0x34, 0x35, 0x36];
for (let i = 0x61; i <= 0x7A; ++i) {
    LONG_ALPHABITMAP_CHARS.push(i);
}
|
||||
|
||||
/**
 * Build a SearchTreeBranches subclass whose child keys come from a fixed
 * alphabet and are stored as a presence bitmap instead of a byte array.
 *
 * Bit `i` of `bitmap` set means a branch exists for byte
 * `alphabitmap_chars[i]`; a branch's rank among the set bits is its
 * index into `subtrees` and `nodeids`.
 *
 * @param {number[]} alphabitmap_chars
 * @param {number} width
 * @return {(typeof SearchTreeBranches)&{"ALPHABITMAP_CHARS": number[], "width": number}}
 */
function makeSearchTreeBranchesAlphaBitmapClass(alphabitmap_chars, width) {
    const bitwidth = width * 8;
    const cls = class SearchTreeBranchesAlphaBitmap extends SearchTreeBranches {
        /**
         * @param {number} bitmap
         * @param {Uint8Array} nodeids
         */
        constructor(bitmap, nodeids) {
            super(nodeids.length / 6, nodeids);
            // one six-byte node id per set bit
            if (nodeids.length / 6 !== bitCount(bitmap)) {
                throw new Error(`mismatch ${bitmap} ${nodeids}`);
            }
            this.bitmap = bitmap;
            this.nodeids = nodeids;
        }
        /** @returns {Generator<[number, Promise<SearchTree>|null]>} */
        * entries() {
            let i = 0;
            let j = 0;
            while (i < bitwidth) {
                if (this.bitmap & (1 << i)) {
                    yield [alphabitmap_chars[i], this.subtrees[j]];
                    j += 1;
                }
                i += 1;
            }
        }
        /**
         * Map a key byte to its branch index, or -1 when absent.
         * @param {number} k
         * @returns {number}
         */
        getIndex(k) {
            const ix = alphabitmap_chars.indexOf(k);
            if (ix < 0) {
                return ix;
            }
            // rank of bit `ix`: how many set bits precede it
            const result = bitCount(~(0xffffffff << ix) & this.bitmap);
            return result >= this.subtrees.length ? -1 : result;
        }
        /**
         * Map a branch index back to its key byte — the inverse of
         * `getIndex`, equivalent to `this.getKeys()[branch_index]`.
         * @param {number} branch_index
         * @returns {number}
         */
        getKey(branch_index) {
            let alpha_index = 0;
            while (branch_index >= 0) {
                if (this.bitmap & (1 << alpha_index)) {
                    branch_index -= 1;
                }
                alpha_index += 1;
            }
            // The loop advances one position *past* the matching set bit,
            // so step back by one. Returning `alphabitmap_chars[alpha_index]`
            // (as before) was off by one and disagreed with getKeys() and
            // the getKey/getIndex round-trip used by callers.
            return alphabitmap_chars[alpha_index - 1];
        }
        /**
         * @returns {Uint8Array}
         */
        getKeys() {
            const length = bitCount(this.bitmap);
            const result = new Uint8Array(length);
            let result_index = 0;
            for (let alpha_index = 0; alpha_index < bitwidth; ++alpha_index) {
                if (this.bitmap & (1 << alpha_index)) {
                    result[result_index] = alphabitmap_chars[alpha_index];
                    result_index += 1;
                }
            }
            return result;
        }
    };
    cls.ALPHABITMAP_CHARS = alphabitmap_chars;
    cls.width = width;
    return cls;
}
|
||||
|
||||
// 24-entry alphabet (a–z minus q/v) packed into a 3-byte bitmap.
const SearchTreeBranchesShortAlphaBitmap =
    makeSearchTreeBranchesAlphaBitmapClass(SHORT_ALPHABITMAP_CHARS, 3);

// 32-entry alphabet (digits 1–6 plus a–z) packed into a 4-byte bitmap.
const SearchTreeBranchesLongAlphaBitmap =
    makeSearchTreeBranchesAlphaBitmapClass(LONG_ALPHABITMAP_CHARS, 4);
|
||||
|
||||
/**
 * A [suffix tree], used for name-based search.
 *
 * This data structure is used to drive substring matches,
 * such as matching the query "link" to `LinkedList`,
 * and Lev-distance matches, such as matching the
 * query "hahsmap" to `HashMap`.
 *
 * [suffix tree]: https://en.wikipedia.org/wiki/Suffix_tree
 *
 * branches
 * : A sorted-array map of subtrees.
 *
 * data
 * : The substring represented by this node. The root node
 *   is always empty.
 *
 * leaves_suffix
 * : The IDs of every entry that matches. Levenshtein matches
 *   won't include these.
 *
 * leaves_whole
 * : The IDs of every entry that matches exactly. Levenshtein matches
 *   will include these.
 *
 * @type {{
 *     might_have_prefix_branches: SearchTreeBranches,
 *     branches: SearchTreeBranches,
 *     data: Uint8Array,
 *     leaves_suffix: RoaringBitmap,
 *     leaves_whole: RoaringBitmap,
 * }}
 */
class SearchTree {
    /**
     * @param {SearchTreeBranches} branches
     * @param {SearchTreeBranches} might_have_prefix_branches
     * @param {Uint8Array} data
     * @param {RoaringBitmap} leaves_whole
     * @param {RoaringBitmap} leaves_suffix
     */
    constructor(
        branches,
        might_have_prefix_branches,
        data,
        leaves_whole,
        leaves_suffix,
    ) {
        this.might_have_prefix_branches = might_have_prefix_branches;
        this.branches = branches;
        this.data = data;
        this.leaves_suffix = leaves_suffix;
        this.leaves_whole = leaves_whole;
    }
    /**
     * Returns the Trie for the root node.
     *
     * A Trie pointer refers to a single node in a logical decompressed search tree
     * (the real search tree is compressed).
     *
     * @return {Trie}
     */
    trie() {
        return new Trie(this, 0);
    }

    /**
     * Return the trie representing `name`
     *
     * Walks one byte at a time from the root; returns null as soon as
     * a byte has no matching child.
     * @param {Uint8Array|string} name
     * @returns {Promise<Trie?>}
     */
    async search(name) {
        if (typeof name === "string") {
            const utf8encoder = new TextEncoder();
            name = utf8encoder.encode(name);
        }
        let trie = this.trie();
        for (const datum of name) {
            // code point definitely exists
            const newTrie = trie.child(datum);
            if (newTrie) {
                trie = await newTrie;
            } else {
                return null;
            }
        }
        return trie;
    }

    /**
     * Yield tries reachable from `name` within a small edit distance.
     *
     * Queries shorter than 3 bytes fall back to an exact search;
     * otherwise a parametric Levenshtein automaton (distance 2 for
     * names of 6+ bytes, else distance 1) is run over the tree with an
     * explicit stack, yielding each accepting child as it is found.
     * NOTE(review): Lev1TParametricDescription / Lev2TParametricDescription
     * are defined elsewhere in this file — presumably precomputed
     * automaton tables; confirm their contract there.
     * @param {Uint8Array|string} name
     * @returns {AsyncGenerator<Trie>}
     */
    async* searchLev(name) {
        if (typeof name === "string") {
            const utf8encoder = new TextEncoder();
            name = utf8encoder.encode(name);
        }
        const w = name.length;
        if (w < 3) {
            // too short for fuzzy matching: exact match or nothing
            const trie = await this.search(name);
            if (trie !== null) {
                yield trie;
            }
            return;
        }
        const levParams = w >= 6 ?
            new Lev2TParametricDescription(w) :
            new Lev1TParametricDescription(w);
        // Depth-first traversal: each stack entry pairs a pending child
        // trie with the automaton state that reached it.
        /** @type {Array<[Promise<Trie>, number]>} */
        const stack = [[Promise.resolve(this.trie()), 0]];
        const n = levParams.n;
        while (stack.length !== 0) {
            // It's not empty
            /** @type {[Promise<Trie>, number]} */
            //@ts-expect-error
            const [triePromise, levState] = stack.pop();
            const trie = await triePromise;
            for (const byte of trie.keysExcludeSuffixOnly()) {
                const levPos = levParams.getPosition(levState);
                // characteristic vector of `byte` over the window of the
                // query the automaton can currently see
                const vector = levParams.getVector(
                    name,
                    byte,
                    levPos,
                    Math.min(w, levPos + (2 * n) + 1),
                );
                const newLevState = levParams.transition(
                    levState,
                    levPos,
                    vector,
                );
                // negative state means the automaton rejected this branch
                if (newLevState >= 0) {
                    const child = trie.child(byte);
                    if (child) {
                        stack.push([child, newLevState]);
                        if (levParams.isAccept(newLevState)) {
                            yield child;
                        }
                    }
                }
            }
        }
    }
}
|
||||
|
||||
/**
|
||||
* A representation of a set of strings in the search index,
|
||||
* as a subset of the entire tree.
|
||||
*/
|
||||
class Trie {
|
||||
/**
|
||||
* @param {SearchTree} tree
|
||||
* @param {number} offset
|
||||
*/
|
||||
constructor(tree, offset) {
|
||||
this.tree = tree;
|
||||
this.offset = offset;
|
||||
}
|
||||
|
||||
/**
|
||||
* All exact matches for the string represented by this node.
|
||||
* @returns {RoaringBitmap}
|
||||
*/
|
||||
matches() {
|
||||
if (this.offset === this.tree.data.length) {
|
||||
return this.tree.leaves_whole;
|
||||
} else {
|
||||
return EMPTY_BITMAP;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * All matches for strings that contain the string represented by this node.
 *
 * Breadth-first walk of the subtree rooted here: each layer first
 * yields every node's combined whole+suffix leaves, then schedules
 * loads for not-yet-loaded children. Children sharing a node id are
 * grouped (via HashTable) so each id is fetched only once, and the
 * resulting promise is written back into every parent's subtree slot.
 * @returns {AsyncGenerator<RoaringBitmap>}
 */
async* substringMatches() {
    /** @type {Promise<SearchTree>[]} */
    let layer = [Promise.resolve(this.tree)];
    while (layer.length) {
        const current_layer = layer;
        layer = [];
        // yield results for this layer before descending
        for await (const tree of current_layer) {
            yield tree.leaves_whole.union(tree.leaves_suffix);
        }
        // node id -> list of [key byte, parent] pairs that point at it
        /** @type {HashTable<[number, SearchTree][]>} */
        const subnodes = new HashTable();
        for await (const node of current_layer) {
            const branches = node.branches;
            const l = branches.subtrees.length;
            for (let i = 0; i < l; ++i) {
                const subtree = branches.subtrees[i];
                if (subtree) {
                    // already loading/loaded: descend directly
                    layer.push(subtree);
                } else if (subtree === null) {
                    // not loaded yet: batch by node id for a single fetch
                    const byte = branches.getKey(i);
                    const newnode = branches.getNodeID(i);
                    if (!newnode) {
                        throw new Error(`malformed tree; no node for key ${byte}`);
                    } else {
                        let subnode_list = subnodes.get(newnode);
                        if (!subnode_list) {
                            subnode_list = [[byte, node]];
                            subnodes.set(newnode, subnode_list);
                        } else {
                            subnode_list.push([byte, node]);
                        }
                    }
                } else {
                    throw new Error(`malformed tree; index ${i} does not exist`);
                }
            }
        }
        for (const [newnode, subnode_list] of subnodes.entries()) {
            // `registry` is captured from the enclosing loadDatabase scope
            const res = registry.searchTreeLoadByNodeID(newnode);
            for (const [byte, node] of subnode_list) {
                // cache the pending promise in every parent that
                // references this node id, including the parallel
                // might_have_prefix_branches table when it has the key
                const branches = node.branches;
                const might_have_prefix_branches = node.might_have_prefix_branches;
                const i = branches.getIndex(byte);
                branches.subtrees[i] = res;
                const mhpI = might_have_prefix_branches.getIndex(byte);
                if (mhpI !== -1) {
                    might_have_prefix_branches.subtrees[mhpI] = res;
                }
            }
            layer.push(res);
        }
    }
}
|
||||
|
||||
/**
 * All matches for strings that start with the string represented by this node.
 *
 * Results are yielded in order of increasing string length. To do this, tree
 * nodes are walked breadth-first while their whole-string leaf bitmaps are
 * pushed into a hand-rolled array-backed min-heap ("backlog") keyed by the
 * accumulated string length; the heap is drained up to the current layer's
 * minimum length, since deeper children can only produce longer strings.
 *
 * @returns {AsyncGenerator<RoaringBitmap>}
 */
async* prefixMatches() {
    /** @type {{node: Promise<SearchTree>, len: number}[]} */
    let layer = [{node: Promise.resolve(this.tree), len: 0}];
    // https://en.wikipedia.org/wiki/Heap_(data_structure)#Implementation_using_arrays
    /** @type {{bitmap: RoaringBitmap, length: number}[]} */
    const backlog = [];
    while (layer.length !== 0 || backlog.length !== 0) {
        const current_layer = layer;
        layer = [];
        let minLength = null;
        // push every entry in the current layer into the backlog,
        // a min-heap of result entries
        // we then yield the smallest ones (can't yield bigger ones
        // if we want to do them in order)
        for (const {node, len} of current_layer) {
            const tree = await node;
            const length = len + tree.data.length;
            if (minLength === null || length < minLength) {
                minLength = length;
            }
            // Sift the new entry up until the parent is no longer larger.
            let backlogSlot = backlog.length;
            backlog.push({bitmap: tree.leaves_whole, length});
            while (backlogSlot > 0 &&
                backlog[backlogSlot].length < backlog[(backlogSlot - 1) >> 1].length
            ) {
                const parentSlot = (backlogSlot - 1) >> 1;
                const parent = backlog[parentSlot];
                backlog[parentSlot] = backlog[backlogSlot];
                backlog[backlogSlot] = parent;
                backlogSlot = parentSlot;
            }
        }
        // yield nodes in length order, smallest one first
        // we know that, whatever the smallest item is
        // every child will be bigger than that
        while (backlog.length !== 0) {
            const backlogEntry = backlog[0];
            // Stop draining once entries exceed the layer minimum: a deeper
            // layer might still produce something shorter than them.
            if (minLength !== null && backlogEntry.length > minLength) {
                break;
            }
            if (!backlogEntry.bitmap.isEmpty()) {
                yield backlogEntry.bitmap;
            }
            // Pop the heap root: move the last entry to the top, shrink,
            // then sift down.
            backlog[0] = backlog[backlog.length - 1];
            backlog.length -= 1;
            let backlogSlot = 0;
            const backlogLength = backlog.length;
            while (backlogSlot < backlogLength) {
                const leftSlot = (backlogSlot << 1) + 1;
                const rightSlot = (backlogSlot << 1) + 2;
                let smallest = backlogSlot;
                if (leftSlot < backlogLength &&
                    backlog[leftSlot].length < backlog[smallest].length
                ) {
                    smallest = leftSlot;
                }
                if (rightSlot < backlogLength &&
                    backlog[rightSlot].length < backlog[smallest].length
                ) {
                    smallest = rightSlot;
                }
                if (smallest === backlogSlot) {
                    break;
                } else {
                    const tmp = backlog[backlogSlot];
                    backlog[backlogSlot] = backlog[smallest];
                    backlog[smallest] = tmp;
                    backlogSlot = smallest;
                }
            }
        }
        // if we still have more subtrees to walk, then keep going
        // Only might_have_prefix_branches are walked here: suffix-only
        // children cannot produce prefix matches.
        /** @type {HashTable<{byte: number, tree: SearchTree, len: number}[]>} */
        const subnodes = new HashTable();
        for await (const {node, len} of current_layer) {
            const tree = await node;
            const length = len + tree.data.length;
            const mhp_branches = tree.might_have_prefix_branches;
            const l = mhp_branches.subtrees.length;
            for (let i = 0; i < l; ++i) {
                // Each branch edge adds one byte to the accumulated length.
                const len = length + 1;
                const subtree = mhp_branches.subtrees[i];
                if (subtree) {
                    layer.push({node: subtree, len});
                } else if (subtree === null) {
                    // Unloaded child: queue it, grouped by node ID so each
                    // distinct node is fetched once.
                    const byte = mhp_branches.getKey(i);
                    const newnode = mhp_branches.getNodeID(i);
                    if (!newnode) {
                        throw new Error(`malformed tree; no node for key ${byte}`);
                    } else {
                        let subnode_list = subnodes.get(newnode);
                        if (!subnode_list) {
                            subnode_list = [{byte, tree, len}];
                            subnodes.set(newnode, subnode_list);
                        } else {
                            subnode_list.push({byte, tree, len});
                        }
                    }
                } else {
                    throw new Error(`malformed tree; index ${i} does not exist`);
                }
            }
        }
        for (const [newnode, subnode_list] of subnodes.entries()) {
            const res = registry.searchTreeLoadByNodeID(newnode);
            // Use the shortest accumulated length among all parents pointing
            // at this node, keeping the layer-minimum bound conservative.
            let len = Number.MAX_SAFE_INTEGER;
            for (const {byte, tree, len: subtreelen} of subnode_list) {
                if (subtreelen < len) {
                    len = subtreelen;
                }
                // Cache the pending load into both branch tables.
                const mhp_branches = tree.might_have_prefix_branches;
                const i = mhp_branches.getIndex(byte);
                mhp_branches.subtrees[i] = res;
                const branches = tree.branches;
                const bi = branches.getIndex(byte);
                branches.subtrees[bi] = res;
            }
            layer.push({node: res, len});
        }
    }
}
|
||||
|
||||
/**
|
||||
* Returns all keys that are children of this node.
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
keys() {
|
||||
const data = this.tree.data;
|
||||
if (this.offset === data.length) {
|
||||
return this.tree.branches.getKeys();
|
||||
} else {
|
||||
return Uint8Array.of(data[this.offset]);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Returns all nodes that are direct children of this node.
 *
 * When positioned mid-way through this node's packed data segment, the only
 * child is the next byte of the segment. At the segment's end, each branch
 * entry becomes a child; branches that have not been loaded yet are fetched
 * from the registry and the resulting promise is cached back into the branch
 * tables so subsequent walks reuse it.
 *
 * @returns {[number, Promise<Trie>][]}
 */
children() {
    const data = this.tree.data;
    if (this.offset === data.length) {
        /** @type {[number, Promise<Trie>][]} */
        const nodes = [];
        let i = 0;
        for (const [k, v] of this.tree.branches.entries()) {
            /** @type {Promise<SearchTree>} */
            let node;
            if (v) {
                // Already loaded (or loading); reuse the cached promise.
                node = v;
            } else {
                const newnode = this.tree.branches.getNodeID(i);
                if (!newnode) {
                    throw new Error(`malformed tree; no hash for key ${k}: ${newnode} \
${this.tree.branches.nodeids} ${this.tree.branches.getKeys()}`);
                }
                node = registry.searchTreeLoadByNodeID(newnode);
                // Cache the load in both branch tables (the mhp table only
                // if this key appears there).
                this.tree.branches.subtrees[i] = node;
                const mhpI = this.tree.might_have_prefix_branches.getIndex(k);
                if (mhpI !== -1) {
                    this.tree.might_have_prefix_branches.subtrees[mhpI] = node;
                }
            }
            nodes.push([k, node.then(node => node.trie())]);
            i += 1;
        }
        return nodes;
    } else {
        // Mid-segment: the single child advances one byte into the data.
        /** @type {number} */
        const codePoint = data[this.offset];
        const trie = new Trie(this.tree, this.offset + 1);
        return [[codePoint, Promise.resolve(trie)]];
    }
}
|
||||
|
||||
/**
|
||||
* Returns all keys that are children of this node.
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
keysExcludeSuffixOnly() {
|
||||
const data = this.tree.data;
|
||||
if (this.offset === data.length) {
|
||||
return this.tree.might_have_prefix_branches.getKeys();
|
||||
} else {
|
||||
return Uint8Array.of(data[this.offset]);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Returns all nodes that are direct children of this node.
 *
 * Like `children`, but iterates only the might-have-prefix branch table,
 * skipping branches that exist solely for suffix matching. Unloaded children
 * are fetched from the registry and the pending promise is cached back into
 * both branch tables.
 *
 * @returns {[number, Promise<Trie>][]}
 */
childrenExcludeSuffixOnly() {
    const data = this.tree.data;
    if (this.offset === data.length) {
        /** @type {[number, Promise<Trie>][]} */
        const nodes = [];
        let i = 0;
        for (const [k, v] of this.tree.might_have_prefix_branches.entries()) {
            /** @type {Promise<SearchTree>} */
            let node;
            if (v) {
                // Already loaded (or loading); reuse the cached promise.
                node = v;
            } else {
                const newnode = this.tree.might_have_prefix_branches.getNodeID(i);
                if (!newnode) {
                    throw new Error(`malformed tree; no node for key ${k}`);
                }
                node = registry.searchTreeLoadByNodeID(newnode);
                // Every mhp key also exists in the full branch table, so
                // cache the load in both.
                this.tree.might_have_prefix_branches.subtrees[i] = node;
                this.tree.branches.subtrees[this.tree.branches.getIndex(k)] = node;
            }
            nodes.push([k, node.then(node => node.trie())]);
            i += 1;
        }
        return nodes;
    } else {
        // Mid-segment: the single child advances one byte into the data.
        /** @type {number} */
        const codePoint = data[this.offset];
        const trie = new Trie(this.tree, this.offset + 1);
        return [[codePoint, Promise.resolve(trie)]];
    }
}
|
||||
|
||||
/**
 * Returns a single node that is a direct child of this node.
 *
 * Mid-segment, the only possible child is the next data byte; at the
 * segment's end, the branch table is consulted and an unloaded branch is
 * fetched from the registry (and cached in both branch tables).
 *
 * @param {number} byte
 * @returns {Promise<Trie>?} null when no child exists for `byte`
 */
child(byte) {
    if (this.offset === this.tree.data.length) {
        const i = this.tree.branches.getIndex(byte);
        if (i !== -1) {
            let branch = this.tree.branches.subtrees[i];
            if (branch === null) {
                // Known but unloaded: fetch it by node ID.
                const newnode = this.tree.branches.getNodeID(i);
                if (!newnode) {
                    throw new Error(`malformed tree; no node for key ${byte}`);
                }
                branch = registry.searchTreeLoadByNodeID(newnode);
                // Cache the pending load so later walks reuse it.
                this.tree.branches.subtrees[i] = branch;
                const mhpI = this.tree.might_have_prefix_branches.getIndex(byte);
                if (mhpI !== -1) {
                    this.tree.might_have_prefix_branches.subtrees[mhpI] = branch;
                }
            }
            return branch.then(branch => branch.trie());
        }
    } else if (this.tree.data[this.offset] === byte) {
        // Mid-segment match: advance one byte into the packed data.
        return Promise.resolve(new Trie(this.tree, this.offset + 1));
    }
    return null;
}
|
||||
}
|
||||
|
||||
class DataColumn {
    /**
     * Construct the wrapper object for a data column.
     *
     * Rows are grouped into buckets; each bucket is identified by a 6-byte
     * hash (a slice of `hashes`) and loaded lazily on first access. Empty
     * cells are tracked in `emptyset` and never stored in the loaded data.
     *
     * @param {number[]} counts number of non-empty rows per bucket
     * @param {Uint8Array} hashes concatenated 6-byte bucket hashes
     * @param {RoaringBitmap} emptyset row IDs whose cell is the empty string
     * @param {string} name column name, used to locate bucket files
     */
    constructor(counts, hashes, emptyset, name) {
        this.hashes = hashes;
        this.emptyset = emptyset;
        this.name = name;
        /** @type {{"hash": Uint8Array, "data": Promise<Uint8Array[]>?, "end": number}[]} */
        this.buckets = [];
        // Starting row ID of each bucket, parallel to `this.buckets`.
        this.bucket_keys = [];
        const l = counts.length;
        let k = 0;
        let totalLength = 0;
        for (let i = 0; i < l; ++i) {
            const count = counts[i];
            totalLength += count;
            const start = k;
            // Advance k past `count` non-empty rows; rows in the emptyset
            // don't count against the bucket's quota (j is held back),
            // so a bucket's ID range can be wider than `count`.
            for (let j = 0; j < count; ++j) {
                if (emptyset.contains(k)) {
                    j -= 1;
                }
                k += 1;
            }
            const end = k;
            const bucket = {hash: hashes.subarray(i * 6, (i + 1) * 6), data: null, end, count};
            this.buckets.push(bucket);
            this.bucket_keys.push(start);
        }
        this.length = totalLength;
    }
    /**
     * Check if a cell contains the empty string.
     * @param {number} id
     * @returns {boolean}
     */
    isEmpty(id) {
        return this.emptyset.contains(id);
    }
    /**
     * Look up a cell by row ID.
     *
     * Loads the owning bucket on first access and caches it; empty-string
     * placeholders are spliced back in so the cached array can be indexed
     * directly by `id - start`.
     *
     * @param {number} id
     * @returns {Promise<Uint8Array|undefined>} undefined if `id` is out of range
     */
    async at(id) {
        if (this.emptyset.contains(id)) {
            return Promise.resolve(EMPTY_UINT8);
        } else {
            // Linear scan for the bucket whose start key covers `id`.
            let idx = -1;
            while (this.bucket_keys[idx + 1] <= id) {
                idx += 1;
            }
            if (idx === -1 || idx >= this.bucket_keys.length) {
                return Promise.resolve(undefined);
            } else {
                const start = this.bucket_keys[idx];
                const {hash, end} = this.buckets[idx];
                let data = this.buckets[idx].data;
                if (data === null) {
                    const dataSansEmptyset = await registry.dataLoadByNameAndHash(
                        this.name,
                        hash,
                    );
                    // After the `await` resolves, another task might fill
                    // in the data. If so, we should use that.
                    data = this.buckets[idx].data;
                    if (data !== null) {
                        return (await data)[id - start];
                    }
                    // Re-insert EMPTY_UINT8 placeholders at the positions
                    // listed in the emptyset. `dataWithEmptyset` stays null
                    // (and no copy is made) if the range has no empty rows.
                    /** @type {(Uint8Array[])|null} */
                    let dataWithEmptyset = null;
                    let pos = start;
                    let insertCount = 0;
                    while (pos < end) {
                        if (this.emptyset.contains(pos)) {
                            if (dataWithEmptyset === null) {
                                dataWithEmptyset = dataSansEmptyset.splice(0, insertCount);
                            } else if (insertCount !== 0) {
                                dataWithEmptyset.push(
                                    ...dataSansEmptyset.splice(0, insertCount),
                                );
                            }
                            insertCount = 0;
                            dataWithEmptyset.push(EMPTY_UINT8);
                        } else {
                            insertCount += 1;
                        }
                        pos += 1;
                    }
                    data = Promise.resolve(
                        dataWithEmptyset === null ?
                            dataSansEmptyset :
                            dataWithEmptyset.concat(dataSansEmptyset),
                    );
                    // Cache for future lookups.
                    this.buckets[idx].data = data;
                }
                return (await data)[id - start];
            }
        }
    }
}
|
||||
|
||||
class Database {
    /**
     * The primary frontend for accessing data in this index.
     *
     * Thin wrapper pairing the search trees (one per indexed column) with
     * the lazily-loaded data columns.
     *
     * @param {Map<string, SearchTree>} searchTreeRoots
     * @param {Map<string, DataColumn>} dataColumns
     */
    constructor(searchTreeRoots, dataColumns) {
        this.searchTreeRoots = searchTreeRoots;
        this.dataColumns = dataColumns;
    }
    /**
     * Search a column by name, returning verbatim matched IDs.
     * @param {string} colname
     * @returns {SearchTree|undefined}
     */
    getIndex(colname) {
        const root = this.searchTreeRoots.get(colname);
        return root;
    }
    /**
     * Look up a cell by column ID and row ID.
     * @param {string} colname
     * @returns {DataColumn|undefined}
     */
    getData(colname) {
        const column = this.dataColumns.get(colname);
        return column;
    }
}
|
||||
|
||||
/**
 * Load a data column.
 *
 * Hashes the raw payload to find the matching pending-load callback, then
 * decodes the payload: a sequence of entries where bytes 48..63 ("0".."?")
 * are backreferences to one of the 16 most recent items, and anything else
 * is a nibble-encoded length (terminated by a byte >= 96, "`") followed by
 * that many raw bytes.
 *
 * @param {Uint8Array} data
 */
function loadColumnFromBytes(data) {
    const hashBuf = Uint8Array.of(0, 0, 0, 0, 0, 0, 0, 0);
    // Subarray view: reflects hashBuf once siphashOfBytes writes into it.
    const truncatedHash = hashBuf.subarray(2, 8);
    siphashOfBytes(data, 0, 0, 0, 0, hashBuf);
    const cb = registry.dataColumnLoadPromiseCallbacks.get(truncatedHash);
    if (cb) {
        // Ring of the 16 most recently decoded items, newest first.
        const backrefs = [];
        const dataSansEmptyset = [];
        let i = 0;
        const l = data.length;
        while (i < l) {
            let c = data[i];
            if (c >= 48 && c <= 63) { // 48 = "0", 63 = "?"
                // Backreference: reuse a recently decoded item.
                dataSansEmptyset.push(backrefs[c - 48]);
                i += 1;
            } else {
                // Length is encoded 4 bits per byte; bytes below 96 ("`")
                // continue the length, the final byte contributes the last
                // nibble.
                let n = 0;
                while (c < 96) { // 96 = "`"
                    n = (n << 4) | (c & 0xF);
                    i += 1;
                    c = data[i];
                }
                n = (n << 4) | (c & 0xF);
                i += 1;
                const item = data.subarray(i, i + n);
                dataSansEmptyset.push(item);
                i += n;
                // Most-recent-first; cap the window at 16 entries.
                backrefs.unshift(item);
                if (backrefs.length > 16) {
                    backrefs.pop();
                }
            }
        }
        cb(null, dataSansEmptyset);
    }
}
|
||||
|
||||
/**
 * Decode one serialized search-tree blob.
 *
 * The blob is a concatenation of nodes, last node = root. Each node is either
 * "uncompressed" (compression_tag 0 or 1, hashed verbatim) or "compressed"
 * (tag > 1), where child node IDs and/or the data segment may be encoded as
 * references to recently decoded nodes (`hash_history` / `data_history`).
 * Compressed nodes are re-serialized into `canonical` form before hashing so
 * their node ID matches what the uncompressed encoding would produce.
 *
 * @param {string} inputBase64
 * @returns {[Uint8Array, SearchTree]} the root's 6-byte node ID and the root tree
 */
function makeSearchTreeFromBase64(inputBase64) {
    const input = makeUint8ArrayFromBase64(inputBase64);
    let i = 0;
    const l = input.length;
    // Nodes decoded earlier in this blob, keyed by truncated hash, so that
    // later nodes (including the root) can link to them directly.
    /** @type {HashTable<SearchTree>} */
    const stash = new HashTable();
    const hash = Uint8Array.of(0, 0, 0, 0, 0, 0, 0, 0);
    // View into bytes 2..8 of `hash`: the 6-byte node ID.
    const truncatedHash = new Uint8Array(hash.buffer, 2, 6);
    // used for handling compressed (that is, relative-offset) nodes
    /** @type {{hash: Uint8Array, used: boolean}[]} */
    const hash_history = [];
    /** @type {Uint8Array[]} */
    const data_history = [];
    // Scratch buffer reused across compressed nodes for canonical re-encoding.
    let canonical = EMPTY_UINT8;
    /** @type {SearchTree} */
    let tree = new SearchTree(
        EMPTY_SEARCH_TREE_BRANCHES,
        EMPTY_SEARCH_TREE_BRANCHES,
        EMPTY_UINT8,
        EMPTY_BITMAP,
        EMPTY_BITMAP,
    );
    /**
     * Decode a node's branch tables starting at byte offset `i`.
     *
     * "cp" = might-have-prefix children, "cs" = suffix-only children.
     * Branch keys may be stored as an explicit byte array or as an alphabet
     * bitmap (short or long form, flagged by the high bits of the length
     * bytes); child node IDs are 6 bytes each, possibly compressed as
     * references into `hash_history`.
     *
     * @param {Uint8Array} input
     * @param {number} i
     * @param {number} compression_tag
     * @returns {{
     * "cpbranches": Uint8Array,
     * "csbranches": Uint8Array,
     * "might_have_prefix_branches": SearchTreeBranches,
     * "branches": SearchTreeBranches,
     * "cpnodes": Uint8Array,
     * "csnodes": Uint8Array,
     * "consumed_len_bytes": number,
     * }}
     */
    function makeBranchesFromBinaryData(
        input,
        i,
        compression_tag,
    ) {
        const is_pure_suffixes_only_node = (compression_tag & 0x01) !== 0x00;
        const is_stack_compressed = (compression_tag & 0x02) !== 0;
        const is_long_compressed = (compression_tag & 0x04) !== 0;
        const all_children_are_compressed =
            (compression_tag & 0xF0) === 0xF0 && !is_long_compressed;
        const any_children_are_compressed =
            (compression_tag & 0xF0) !== 0x00 || is_long_compressed;
        const start_point = i;
        let cplen;
        let cslen;
        let alphabitmap = null;
        if (is_pure_suffixes_only_node) {
            // Only the suffix-child count is stored; high bits of the count
            // byte select the alphabet-bitmap encoding.
            cplen = 0;
            cslen = input[i];
            i += 1;
            if (cslen >= 0xc0) {
                alphabitmap = SearchTreeBranchesLongAlphaBitmap;
                cslen = cslen & 0x3F;
            } else if (cslen >= 0x80) {
                alphabitmap = SearchTreeBranchesShortAlphaBitmap;
                cslen = cslen & 0x7F;
            }
        } else {
            cplen = input[i];
            i += 1;
            cslen = input[i];
            i += 1;
            if (cplen === 0xff && cslen === 0xff) {
                // 0xff/0xff marks the maximal 256-prefix-children case.
                cplen = 0x100;
                cslen = 0;
            } else if (cplen >= 0xc0 && cslen >= 0xc0) {
                alphabitmap = SearchTreeBranchesLongAlphaBitmap;
                cplen = cplen & 0x3F;
                cslen = cslen & 0x3F;
            } else if (cplen >= 0x80 && cslen >= 0x80) {
                alphabitmap = SearchTreeBranchesShortAlphaBitmap;
                cplen = cplen & 0x7F;
                cslen = cslen & 0x7F;
            }
        }
        let j = 0;
        /** @type {Uint8Array} */
        let cpnodes;
        if (any_children_are_compressed) {
            // Rebuild the 6-byte-per-child node ID array, resolving
            // compressed entries against hash_history: either the most
            // recent not-yet-used entry ("stack" mode) or an explicit
            // backwards offset.
            cpnodes = cplen === 0 ? EMPTY_UINT8 : new Uint8Array(cplen * 6);
            while (j < cplen) {
                const is_compressed = all_children_are_compressed ||
                    ((0x10 << j) & compression_tag) !== 0;
                if (is_compressed) {
                    let slot = hash_history.length - 1;
                    if (is_stack_compressed) {
                        while (hash_history[slot].used) {
                            slot -= 1;
                        }
                    } else {
                        slot -= input[i];
                        i += 1;
                    }
                    hash_history[slot].used = true;
                    cpnodes.set(
                        hash_history[slot].hash,
                        j * 6,
                    );
                } else {
                    const joff = j * 6;
                    cpnodes[joff + 0] = input[i + 0];
                    cpnodes[joff + 1] = input[i + 1];
                    cpnodes[joff + 2] = input[i + 2];
                    cpnodes[joff + 3] = input[i + 3];
                    cpnodes[joff + 4] = input[i + 4];
                    cpnodes[joff + 5] = input[i + 5];
                    i += 6;
                }
                j += 1;
            }
        } else {
            // No compression: the IDs can be viewed in place.
            cpnodes = cplen === 0 ? EMPTY_UINT8 : input.subarray(i, i + (cplen * 6));
            i += cplen * 6;
        }
        j = 0;
        /** @type {Uint8Array} */
        let csnodes;
        if (any_children_are_compressed) {
            // Same resolution as cpnodes; the per-child compression bits
            // continue after the cplen prefix bits.
            csnodes = cslen === 0 ? EMPTY_UINT8 : new Uint8Array(cslen * 6);
            while (j < cslen) {
                const is_compressed = all_children_are_compressed ||
                    ((0x10 << (cplen + j)) & compression_tag) !== 0;
                if (is_compressed) {
                    let slot = hash_history.length - 1;
                    if (is_stack_compressed) {
                        while (hash_history[slot].used) {
                            slot -= 1;
                        }
                    } else {
                        slot -= input[i];
                        i += 1;
                    }
                    hash_history[slot].used = true;
                    csnodes.set(
                        hash_history[slot].hash,
                        j * 6,
                    );
                } else {
                    const joff = j * 6;
                    csnodes[joff + 0] = input[i + 0];
                    csnodes[joff + 1] = input[i + 1];
                    csnodes[joff + 2] = input[i + 2];
                    csnodes[joff + 3] = input[i + 3];
                    csnodes[joff + 4] = input[i + 4];
                    csnodes[joff + 5] = input[i + 5];
                    i += 6;
                }
                j += 1;
            }
        } else {
            csnodes = cslen === 0 ? EMPTY_UINT8 : input.subarray(i, i + (cslen * 6));
            i += cslen * 6;
        }
        // Decode prefix branch keys.
        let cpbranches;
        let might_have_prefix_branches;
        if (cplen === 0) {
            cpbranches = EMPTY_UINT8;
            might_have_prefix_branches = EMPTY_SEARCH_TREE_BRANCHES;
        } else if (alphabitmap) {
            cpbranches = new Uint8Array(input.buffer, i + input.byteOffset, alphabitmap.width);
            const branchset = (alphabitmap.width === 4 ? (input[i + 3] << 24) : 0) |
                (input[i + 2] << 16) |
                (input[i + 1] << 8) |
                input[i];
            might_have_prefix_branches = new alphabitmap(branchset, cpnodes);
            i += alphabitmap.width;
        } else {
            cpbranches = new Uint8Array(input.buffer, i + input.byteOffset, cplen);
            might_have_prefix_branches = new SearchTreeBranchesArray(cpbranches, cpnodes);
            i += cplen;
        }
        // Decode suffix branch keys, then build the combined branch table
        // (`branches` = prefix + suffix children, merged in key order).
        let csbranches;
        let branches;
        if (cslen === 0) {
            csbranches = EMPTY_UINT8;
            branches = might_have_prefix_branches;
        } else if (alphabitmap) {
            csbranches = new Uint8Array(input.buffer, i + input.byteOffset, alphabitmap.width);
            const branchset = (alphabitmap.width === 4 ? (input[i + 3] << 24) : 0) |
                (input[i + 2] << 16) |
                (input[i + 1] << 8) |
                input[i];
            if (cplen === 0) {
                branches = new alphabitmap(branchset, csnodes);
            } else {
                // Merge the two bitmap-encoded branch sets; node IDs are
                // interleaved in alphabet order, suffix entries winning when
                // a bit is set in both.
                const cpoffset = i - alphabitmap.width;
                const cpbranchset =
                    (alphabitmap.width === 4 ? (input[cpoffset + 3] << 24) : 0) |
                    (input[cpoffset + 2] << 16) |
                    (input[cpoffset + 1] << 8) |
                    input[cpoffset];
                const hashes = new Uint8Array((cplen + cslen) * 6);
                let cpi = 0;
                let csi = 0;
                let j = 0;
                for (let k = 0; k < alphabitmap.ALPHABITMAP_CHARS.length; k += 1) {
                    if (branchset & (1 << k)) {
                        hashes[j + 0] = csnodes[csi + 0];
                        hashes[j + 1] = csnodes[csi + 1];
                        hashes[j + 2] = csnodes[csi + 2];
                        hashes[j + 3] = csnodes[csi + 3];
                        hashes[j + 4] = csnodes[csi + 4];
                        hashes[j + 5] = csnodes[csi + 5];
                        j += 6;
                        csi += 6;
                    } else if (cpbranchset & (1 << k)) {
                        hashes[j + 0] = cpnodes[cpi + 0];
                        hashes[j + 1] = cpnodes[cpi + 1];
                        hashes[j + 2] = cpnodes[cpi + 2];
                        hashes[j + 3] = cpnodes[cpi + 3];
                        hashes[j + 4] = cpnodes[cpi + 4];
                        hashes[j + 5] = cpnodes[cpi + 5];
                        j += 6;
                        cpi += 6;
                    }
                }
                branches = new alphabitmap(branchset | cpbranchset, hashes);
            }
            i += alphabitmap.width;
        } else {
            csbranches = new Uint8Array(input.buffer, i + input.byteOffset, cslen);
            if (cplen === 0) {
                branches = new SearchTreeBranchesArray(csbranches, csnodes);
            } else {
                // Merge-sort the two byte-keyed branch lists by key.
                const branchset = new Uint8Array(cplen + cslen);
                const hashes = new Uint8Array((cplen + cslen) * 6);
                let cpi = 0;
                let csi = 0;
                let j = 0;
                while (cpi < cplen || csi < cslen) {
                    if (cpi >= cplen || (csi < cslen && cpbranches[cpi] > csbranches[csi])) {
                        branchset[j] = csbranches[csi];
                        const joff = j * 6;
                        const csioff = csi * 6;
                        hashes[joff + 0] = csnodes[csioff + 0];
                        hashes[joff + 1] = csnodes[csioff + 1];
                        hashes[joff + 2] = csnodes[csioff + 2];
                        hashes[joff + 3] = csnodes[csioff + 3];
                        hashes[joff + 4] = csnodes[csioff + 4];
                        hashes[joff + 5] = csnodes[csioff + 5];
                        csi += 1;
                    } else {
                        branchset[j] = cpbranches[cpi];
                        const joff = j * 6;
                        const cpioff = cpi * 6;
                        hashes[joff + 0] = cpnodes[cpioff + 0];
                        hashes[joff + 1] = cpnodes[cpioff + 1];
                        hashes[joff + 2] = cpnodes[cpioff + 2];
                        hashes[joff + 3] = cpnodes[cpioff + 3];
                        hashes[joff + 4] = cpnodes[cpioff + 4];
                        hashes[joff + 5] = cpnodes[cpioff + 5];
                        cpi += 1;
                    }
                    j += 1;
                }
                branches = new SearchTreeBranchesArray(branchset, hashes);
            }
            i += cslen;
        }
        return {
            consumed_len_bytes: i - start_point,
            cpbranches,
            csbranches,
            cpnodes,
            csnodes,
            branches,
            might_have_prefix_branches,
        };
    }
    while (i < l) {
        const start = i;
        let data;
        // compression_tag = 1 means pure-suffixes-only,
        // which is not considered "compressed" for the purposes of this loop
        // because that's the canonical, hashed version of the data
        let compression_tag = input[i];
        const is_pure_suffixes_only_node = (compression_tag & 0x01) !== 0;
        if (compression_tag > 1) {
            // compressed node
            const is_long_compressed = (compression_tag & 0x04) !== 0;
            const is_data_compressed = (compression_tag & 0x08) !== 0;
            i += 1;
            if (is_long_compressed) {
                // Long form: two more tag bytes extend the per-child bits.
                compression_tag |= input[i] << 8;
                i += 1;
                compression_tag |= input[i] << 16;
                i += 1;
            }
            let dlen = input[i];
            i += 1;
            if (is_data_compressed) {
                // Data byte is a backwards index into recently seen segments.
                data = data_history[data_history.length - dlen - 1];
                dlen = data.length;
            } else {
                data = dlen === 0 ?
                    EMPTY_UINT8 :
                    new Uint8Array(input.buffer, i + input.byteOffset, dlen);
                i += dlen;
            }
            const coffset = i;
            const {
                cpbranches,
                csbranches,
                cpnodes,
                csnodes,
                consumed_len_bytes: branches_consumed_len_bytes,
                branches,
                might_have_prefix_branches,
            } = makeBranchesFromBinaryData(input, i, compression_tag);
            i += branches_consumed_len_bytes;
            // Leaf bitmaps: a leading 0 byte means empty, a leading 0xff
            // (non-suffix-only nodes) means both bitmaps are empty.
            let whole;
            let suffix;
            if (is_pure_suffixes_only_node) {
                whole = EMPTY_BITMAP;
                suffix = input[i] === 0 ?
                    EMPTY_BITMAP1 :
                    new RoaringBitmap(input, i);
                i += suffix.consumed_len_bytes;
            } else if (input[i] === 0xff) {
                whole = EMPTY_BITMAP;
                suffix = EMPTY_BITMAP1;
                i += 1;
            } else {
                whole = input[i] === 0 ?
                    EMPTY_BITMAP1 :
                    new RoaringBitmap(input, i);
                i += whole.consumed_len_bytes;
                suffix = input[i] === 0 ?
                    EMPTY_BITMAP1 :
                    new RoaringBitmap(input, i);
                i += suffix.consumed_len_bytes;
            }
            tree = new SearchTree(
                branches,
                might_have_prefix_branches,
                data,
                whole,
                suffix,
            );
            // Rebuild the uncompressed ("canonical") byte layout of this
            // node so its hash matches the uncompressed encoding.
            const clen = (
                (is_pure_suffixes_only_node ? 3 : 4) + // lengths of children and data
                dlen +
                cpnodes.length + csnodes.length +
                cpbranches.length + csbranches.length +
                whole.consumed_len_bytes +
                suffix.consumed_len_bytes
            );
            if (canonical.length < clen) {
                canonical = new Uint8Array(clen);
            }
            let ci = 0;
            canonical[ci] = is_pure_suffixes_only_node ? 1 : 0;
            ci += 1;
            canonical[ci] = dlen;
            ci += 1;
            canonical.set(data, ci);
            ci += dlen;
            // Child-count byte(s), copied from the original stream.
            canonical[ci] = input[coffset];
            ci += 1;
            if (!is_pure_suffixes_only_node) {
                canonical[ci] = input[coffset + 1];
                ci += 1;
            }
            canonical.set(cpnodes, ci);
            ci += cpnodes.length;
            canonical.set(csnodes, ci);
            ci += csnodes.length;
            canonical.set(cpbranches, ci);
            ci += cpbranches.length;
            canonical.set(csbranches, ci);
            ci += csbranches.length;
            // Leaf bitmap bytes are copied verbatim from the input.
            const leavesOffset = i - whole.consumed_len_bytes - suffix.consumed_len_bytes;
            for (let j = leavesOffset; j < i; j += 1) {
                canonical[ci + j - leavesOffset] = input[j];
            }
            siphashOfBytes(canonical.subarray(0, clen), 0, 0, 0, 0, hash);
            // Clear the top bit of the node ID: that bit marks inlined nodes.
            hash[2] &= 0x7f;
        } else {
            // uncompressed node
            const dlen = input[i + 1];
            i += 2;
            if (dlen === 0) {
                data = EMPTY_UINT8;
            } else {
                data = new Uint8Array(input.buffer, i + input.byteOffset, dlen);
            }
            i += dlen;
            const {
                consumed_len_bytes: branches_consumed_len_bytes,
                branches,
                might_have_prefix_branches,
            } = makeBranchesFromBinaryData(input, i, compression_tag);
            i += branches_consumed_len_bytes;
            let whole;
            let suffix;
            if (is_pure_suffixes_only_node) {
                whole = EMPTY_BITMAP;
                suffix = input[i] === 0 ?
                    EMPTY_BITMAP1 :
                    new RoaringBitmap(input, i);
                i += suffix.consumed_len_bytes;
            } else if (input[i] === 0xff) {
                whole = EMPTY_BITMAP;
                // NOTE(review): the compressed branch above uses EMPTY_BITMAP1
                // here; nothing after this point reads suffix.consumed_len_bytes
                // on this path, but confirm the asymmetry is intentional.
                suffix = EMPTY_BITMAP;
                i += 1;
            } else {
                whole = input[i] === 0 ?
                    EMPTY_BITMAP1 :
                    new RoaringBitmap(input, i);
                i += whole.consumed_len_bytes;
                suffix = input[i] === 0 ?
                    EMPTY_BITMAP1 :
                    new RoaringBitmap(input, i);
                i += suffix.consumed_len_bytes;
            }
            // Uncompressed nodes are hashed over their raw encoded bytes.
            siphashOfBytes(new Uint8Array(
                input.buffer,
                start + input.byteOffset,
                i - start,
            ), 0, 0, 0, 0, hash);
            hash[2] &= 0x7f;
            tree = new SearchTree(
                branches,
                might_have_prefix_branches,
                data,
                whole,
                suffix,
            );
        }
        // Record this node so later nodes can reference it by relative offset.
        hash_history.push({hash: truncatedHash.slice(), used: false});
        if (data.length !== 0) {
            data_history.push(data);
        }
        // Link any child slots whose node was decoded earlier in this blob.
        const tree_branch_nodeids = tree.branches.nodeids;
        const tree_branch_subtrees = tree.branches.subtrees;
        let j = 0;
        let lb = tree.branches.subtrees.length;
        while (j < lb) {
            // node id with a 1 in its most significant bit is inlined, and, so
            // it won't be in the stash
            if ((tree_branch_nodeids[j * 6] & 0x80) === 0) {
                const subtree = stash.getWithOffsetKey(tree_branch_nodeids, j * 6);
                if (subtree !== undefined) {
                    tree_branch_subtrees[j] = Promise.resolve(subtree);
                }
            }
            j += 1;
        }
        const tree_mhp_branch_nodeids = tree.might_have_prefix_branches.nodeids;
        const tree_mhp_branch_subtrees = tree.might_have_prefix_branches.subtrees;
        j = 0;
        lb = tree.might_have_prefix_branches.subtrees.length;
        while (j < lb) {
            // node id with a 1 in its most significant bit is inlined, and, so
            // it won't be in the stash
            if ((tree_mhp_branch_nodeids[j * 6] & 0x80) === 0) {
                const subtree = stash.getWithOffsetKey(tree_mhp_branch_nodeids, j * 6);
                if (subtree !== undefined) {
                    tree_mhp_branch_subtrees[j] = Promise.resolve(subtree);
                }
            }
            j += 1;
        }
        // Stash every node except the last (the root doesn't need stashing).
        if (i !== l) {
            stash.set(truncatedHash, tree);
        }
    }
    return [truncatedHash, tree];
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
registry.searchTreeRootCallback = (error, data) => {
|
||||
if (data) {
|
||||
resolve(data);
|
||||
} else {
|
||||
reject(error);
|
||||
}
|
||||
};
|
||||
hooks.loadRoot(callbacks);
|
||||
});
|
||||
}
|
||||
|
||||
// Export the public API. In a browser, attach it to `window` and run any
// callbacks queued in `window.StringdexOnload` by scripts that loaded before
// this one; otherwise (e.g. Node) export via CommonJS `module.exports`.
if (typeof window !== "undefined") {
    window.Stringdex = {
        loadDatabase,
    };
    window.RoaringBitmap = RoaringBitmap;
    if (window.StringdexOnload) {
        window.StringdexOnload.forEach(cb => cb(window.Stringdex));
    }
} else {
    /** @type {stringdex.Stringdex} */
    // eslint-disable-next-line no-undef
    module.exports.Stringdex = {
        loadDatabase,
    };
    /** @type {stringdex.RoaringBitmap} */
    // eslint-disable-next-line no-undef
    module.exports.RoaringBitmap = RoaringBitmap;
}
|
||||
|
||||
// eslint-disable-next-line max-len
// polyfill https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array/fromBase64
/**
 * Decode a base64 string into bytes, preferring the native
 * `Uint8Array.fromBase64` when the platform provides it.
 * @type {function(string): Uint8Array} base64
 */
//@ts-expect-error
const makeUint8ArrayFromBase64 = Uint8Array.fromBase64 ? Uint8Array.fromBase64 : (string => {
    const decoded = atob(string);
    const out = new Uint8Array(decoded.length);
    let idx = decoded.length;
    while (idx--) {
        out[idx] = decoded.charCodeAt(idx);
    }
    return out;
});
|
||||
/**
 * Decode a hex string into bytes, preferring the native
 * `Uint8Array.fromHex` when the platform provides it.
 * @type {function(string): Uint8Array} base64
 */
//@ts-expect-error
const makeUint8ArrayFromHex = Uint8Array.fromHex ? Uint8Array.fromHex : (string => {
    // Nibble lookup covering both lowercase and uppercase digits.
    /** @type {Map<string, number>} */
    const alpha = new Map();
    "0123456789abcdef".split("").forEach((digit, value) => {
        alpha.set(digit, value);
        alpha.set(digit.toUpperCase(), value);
    });
    const byteCount = string.length >> 1;
    const bytes = new Uint8Array(byteCount);
    for (let i = 0; i < byteCount; i += 1) {
        const hi = alpha.get(string[i << 1]);
        const lo = alpha.get(string[(i << 1) + 1]);
        bytes[i] = (hi << 4) | lo;
    }
    return bytes;
});
|
||||
|
||||
/**
 * Encode raw bytes as a lowercase hexadecimal string.
 * Prefers the native `Uint8Array.prototype.toHex`, falling back to
 * building the string one nibble at a time.
 * @type {function(Uint8Array): string}
 */
//@ts-expect-error
const makeHexFromUint8Array = Uint8Array.prototype.toHex ? (array => array.toHex()) : (array => {
    const digits = "0123456789abcdef";
    /** @type {string[]} */
    const pieces = [];
    for (const byte of array) {
        pieces.push(digits[byte >> 4], digits[byte & 0xf]);
    }
    return pieces.join("");
});
|
||||
|
||||
//////////////
|
||||
|
||||
/**
 * SipHash 1-3: one compression round per 8-byte message block, three
 * finalization rounds. Implemented on pairs of 32-bit integers because
 * JavaScript lacks fast native 64-bit integer arithmetic.
 * @param {Uint8Array} input - data to be hashed
 * @param {number} k0lo - low 32 bits of the first 64-bit key word
 * @param {number} k0hi - high 32 bits of the first 64-bit key word
 * @param {number} k1lo - low 32 bits of the second 64-bit key word
 * @param {number} k1hi - high 32 bits of the second 64-bit key word
 * @param {Uint8Array} output - the digest is written to (clobbers) the first eight bytes
 */
function siphashOfBytes(input, k0lo, k0hi, k1lo, k1hi, output) {
    // hash state
    // While siphash uses 64 bit state, js only has native support
    // for 32 bit numbers. BigInt, unfortunately, doesn't count.
    // It's too slow.
    // v0..v3 are 64-bit words stored as (hi, lo) 32-bit pairs, seeded by
    // XORing the key with SipHash's initialization constants.
    let v0lo = k0lo ^ 0x70736575;
    let v0hi = k0hi ^ 0x736f6d65;
    let v1lo = k1lo ^ 0x6e646f6d;
    let v1hi = k1hi ^ 0x646f7261;
    let v2lo = k0lo ^ 0x6e657261;
    let v2hi = k0hi ^ 0x6c796765;
    let v3lo = k1lo ^ 0x79746573;
    let v3hi = k1hi ^ 0x74656462;
    const inputLength = input.length;
    let inputI = 0;
    // main hash loop
    // `left` = number of trailing bytes after the last full 8-byte block;
    // they are folded into the length-tagged final block below.
    const left = inputLength & 0x7;
    // (milo, mihi) receive each little-endian message block, written by
    // the u8ToU64le helper.
    let milo = 0;
    let mihi = 0;
    while (inputI < inputLength - left) {
        u8ToU64le(inputI, inputI + 8);
        v3lo ^= milo;
        v3hi ^= mihi;
        // one compression round per block: the "1" in SipHash 1-3
        siphashCompress();
        v0lo ^= milo;
        v0hi ^= mihi;
        inputI += 8;
    }
    // read the trailing partial block (zero-padded past `left`)
    u8ToU64le(inputI, inputI + left);
    // finish
    // the final block carries (inputLength mod 256) in its top byte
    const blo = milo;
    const bhi = ((inputLength & 0xff) << 24) | mihi;
    v3lo ^= blo;
    v3hi ^= bhi;
    siphashCompress();
    v0lo ^= blo;
    v0hi ^= bhi;
    v2lo ^= 0xff;
    // three finalization rounds: the "3" in SipHash 1-3
    siphashCompress();
    siphashCompress();
    siphashCompress();
    // Write the 64-bit digest big-endian. Stores into a Uint8Array are
    // truncated modulo 256, so each shifted value keeps only its low byte.
    output[7] = (v0lo ^ v1lo ^ v2lo ^ v3lo) & 0xff;
    output[6] = (v0lo ^ v1lo ^ v2lo ^ v3lo) >>> 8;
    output[5] = (v0lo ^ v1lo ^ v2lo ^ v3lo) >>> 16;
    output[4] = (v0lo ^ v1lo ^ v2lo ^ v3lo) >>> 24;
    output[3] = (v0hi ^ v1hi ^ v2hi ^ v3hi) & 0xff;
    output[2] = (v0hi ^ v1hi ^ v2hi ^ v3hi) >>> 8;
    output[1] = (v0hi ^ v1hi ^ v2hi ^ v3hi) >>> 16;
    output[0] = (v0hi ^ v1hi ^ v2hi ^ v3hi) >>> 24;
    /**
     * Convert eight bytes to a single 64-bit number, written into the
     * outer `milo`/`mihi` pair little-endian. Positions at or beyond
     * `length` read as zero (this is how the final block is padded).
     * @param {number} offset - index of the first byte to read
     * @param {number} length - exclusive end index (at most offset + 8)
     */
    function u8ToU64le(offset, length) {
        const n0 = offset < length ? input[offset] & 0xff : 0;
        const n1 = offset + 1 < length ? input[offset + 1] & 0xff : 0;
        const n2 = offset + 2 < length ? input[offset + 2] & 0xff : 0;
        const n3 = offset + 3 < length ? input[offset + 3] & 0xff : 0;
        const n4 = offset + 4 < length ? input[offset + 4] & 0xff : 0;
        const n5 = offset + 5 < length ? input[offset + 5] & 0xff : 0;
        const n6 = offset + 6 < length ? input[offset + 6] & 0xff : 0;
        const n7 = offset + 7 < length ? input[offset + 7] & 0xff : 0;
        milo = n0 | (n1 << 8) | (n2 << 16) | (n3 << 24);
        mihi = n4 | (n5 << 8) | (n6 << 16) | (n7 << 24);
    }
    // One SipRound over the shared v0..v3 state. Each 64-bit operation is
    // expanded into two 32-bit half-operations; additions propagate the
    // carry from the low half by hand.
    function siphashCompress() {
        // v0 += v1;
        v0hi = (v0hi + v1hi + (((v0lo >>> 0) + (v1lo >>> 0) > 0xffffffff) ? 1 : 0)) | 0;
        v0lo = (v0lo + v1lo) | 0;
        // rotl(v1, 13)
        let v1lo_ = v1lo;
        let v1hi_ = v1hi;
        v1lo = (v1lo_ << 13) | (v1hi_ >>> 19);
        v1hi = (v1hi_ << 13) | (v1lo_ >>> 19);
        // v1 ^= v0
        v1lo ^= v0lo;
        v1hi ^= v0hi;
        // rotl(v0, 32)
        // rotating a 64-bit value by 32 is just swapping its halves
        const v0lo_ = v0lo;
        const v0hi_ = v0hi;
        v0lo = v0hi_;
        v0hi = v0lo_;
        // v2 += v3
        v2hi = (v2hi + v3hi + (((v2lo >>> 0) + (v3lo >>> 0) > 0xffffffff) ? 1 : 0)) | 0;
        v2lo = (v2lo + v3lo) | 0;
        // rotl(v3, 16)
        let v3lo_ = v3lo;
        let v3hi_ = v3hi;
        v3lo = (v3lo_ << 16) | (v3hi_ >>> 16);
        v3hi = (v3hi_ << 16) | (v3lo_ >>> 16);
        // v3 ^= v2
        v3lo ^= v2lo;
        v3hi ^= v2hi;
        // v0 += v3
        v0hi = (v0hi + v3hi + (((v0lo >>> 0) + (v3lo >>> 0) > 0xffffffff) ? 1 : 0)) | 0;
        v0lo = (v0lo + v3lo) | 0;
        // rotl(v3, 21)
        v3lo_ = v3lo;
        v3hi_ = v3hi;
        v3lo = (v3lo_ << 21) | (v3hi_ >>> 11);
        v3hi = (v3hi_ << 21) | (v3lo_ >>> 11);
        // v3 ^= v0
        v3lo ^= v0lo;
        v3hi ^= v0hi;
        // v2 += v1
        v2hi = (v2hi + v1hi + (((v2lo >>> 0) + (v1lo >>> 0) > 0xffffffff) ? 1 : 0)) | 0;
        v2lo = (v2lo + v1lo) | 0;
        // rotl(v1, 17)
        v1lo_ = v1lo;
        v1hi_ = v1hi;
        v1lo = (v1lo_ << 17) | (v1hi_ >>> 15);
        v1hi = (v1hi_ << 17) | (v1lo_ >>> 15);
        // v1 ^= v2
        v1lo ^= v2lo;
        v1hi ^= v2hi;
        // rotl(v2, 32)
        const v2lo_ = v2lo;
        const v2hi_ = v2hi;
        v2lo = v2hi_;
        v2hi = v2lo_;
    }
}
|
||||
|
||||
//////////////
|
||||
|
||||
|
||||
// Parts of this code are based on Lucene, which is licensed under the
|
||||
// Apache/2.0 license.
|
||||
// More information found here:
|
||||
// https://fossies.org/linux/lucene/lucene/core/src/java/org/apache/lucene/util/automaton/
|
||||
// LevenshteinAutomata.java
|
||||
class ParametricDescription {
    /**
     * @param {number} w - length of the word being matched
     * @param {number} n - maximum edit distance this automaton accepts
     * @param {Int32Array} minErrors - per-parametric-state minimum error count
     */
    constructor(w, n, minErrors) {
        this.w = w;
        this.n = n;
        this.minErrors = minErrors;
    }
    /**
     * Whether the given absolute state is an accepting state.
     * @param {number} absState
     * @returns {boolean}
     */
    isAccept(absState) {
        const width = this.w + 1;
        const state = Math.floor(absState / width);
        const offset = absState % width;
        return this.w - offset + this.minErrors[state] <= this.n;
    }
    /**
     * Extract the word-offset component of an absolute state.
     * @param {number} absState
     * @returns {number}
     */
    getPosition(absState) {
        return absState % (this.w + 1);
    }
    /**
     * Build the characteristic bit-vector of `charCode` over
     * `name[pos..end)`: bit (end - 1 - i) is set iff name[i] === charCode.
     * @param {Uint8Array} name
     * @param {number} charCode
     * @param {number} pos
     * @param {number} end
     * @returns {number}
     */
    getVector(name, charCode, pos, end) {
        let vector = 0;
        for (let i = pos; i < end; i += 1) {
            vector <<= 1;
            if (name[i] === charCode) {
                vector |= 1;
            }
        }
        return vector;
    }
    /**
     * Read the `index`-th `bitsPerValue`-wide unsigned value out of a
     * bit-packed Int32Array.
     * @param {Int32Array} data
     * @param {number} index
     * @param {number} bitsPerValue
     * @returns {number}
     */
    unpack(data, index, bitsPerValue) {
        const bitLoc = bitsPerValue * index;
        const dataLoc = bitLoc >> 5;
        const bitStart = bitLoc & 31;
        if (bitStart + bitsPerValue <= 32) {
            // the value fits inside one 32-bit word
            return (data[dataLoc] >> bitStart) & this.MASKS[bitsPerValue - 1];
        }
        // the value straddles a word boundary: combine both halves
        const part = 32 - bitStart;
        return ~~(((data[dataLoc] >> bitStart) & this.MASKS[part - 1]) +
            ((data[1 + dataLoc] & this.MASKS[bitsPerValue - part - 1]) << part));
    }
}
|
||||
// Bit masks used by `unpack`: MASKS[k - 1] === (1 << k) - 1, i.e. the mask
// that keeps the low k bits, for k = 1..32. (The 10-bit entry is 0x3FF,
// continuing the progression; a stray 0x3F here would silently corrupt any
// future table packed at 10 bits or split so that a 10-bit part is read.)
ParametricDescription.prototype.MASKS = new Int32Array([
    0x1, 0x3, 0x7, 0xF,
    0x1F, 0x3F, 0x7F, 0xFF,
    0x1FF, 0x3FF, 0x7FF, 0xFFF,
    0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF,
    0x1FFFF, 0x3FFFF, 0x7FFFF, 0xFFFFF,
    0x1FFFFF, 0x3FFFFF, 0x7FFFFF, 0xFFFFFF,
    0x1FFFFFF, 0x3FFFFFF, 0x7FFFFFF, 0xFFFFFFF,
    0x1FFFFFFF, 0x3FFFFFFF, 0x7FFFFFFF, 0xFFFFFFFF,
]);
|
||||
|
||||
// The following code was generated with the moman/finenight pkg
|
||||
// This package is available under the MIT License, see NOTICE.txt
|
||||
// for more details.
|
||||
// This class is auto-generated, Please do not modify it directly.
|
||||
// You should modify the https://gitlab.com/notriddle/createAutomata.py instead.
|
||||
// The following code was generated with the moman/finenight pkg
|
||||
// This package is available under the MIT License, see NOTICE.txt
|
||||
// for more details.
|
||||
// This class is auto-generated, Please do not modify it directly.
|
||||
// You should modify https://gitlab.com/notriddle/moman-rustdoc instead.
|
||||
|
||||
class Lev2TParametricDescription extends ParametricDescription {
    /**
     * Compute the successor of `absState` after consuming a character whose
     * characteristic bit-vector (relative to the state's window) is
     * `vector`. Returns -1 for the null (sink) state.
     * @param {number} absState - packed state: parametricState * (w + 1) + offset
     * @param {number} position - the state's position within the word; `this.w` means the end
     * @param {number} vector
     * @returns {number}
     */
    transition(absState, position, vector) {
        // split the absolute state into its parametric state and word offset
        let state = Math.floor(absState / (this.w + 1));
        let offset = absState % (this.w + 1);

        // Each branch picks the generated table for the current distance
        // from the end of the word: near the end fewer parametric states
        // are reachable (3, 5, 13, 28, then 45), so the tables shrink and
        // pack entries into fewer bits. The table index is
        // (vector * stateCount) + state; `toStates*` entries are stored
        // +1, so 0 decodes to the null state (-1) below.
        if (position === this.w) {
            if (state < 3) {
                const loc = Math.imul(vector, 3) + state;
                offset += this.unpack(this.offsetIncrs0, loc, 1);
                state = this.unpack(this.toStates0, loc, 2) - 1;
            }
        } else if (position === this.w - 1) {
            if (state < 5) {
                const loc = Math.imul(vector, 5) + state;
                offset += this.unpack(this.offsetIncrs1, loc, 1);
                state = this.unpack(this.toStates1, loc, 3) - 1;
            }
        } else if (position === this.w - 2) {
            if (state < 13) {
                const loc = Math.imul(vector, 13) + state;
                offset += this.unpack(this.offsetIncrs2, loc, 2);
                state = this.unpack(this.toStates2, loc, 4) - 1;
            }
        } else if (position === this.w - 3) {
            if (state < 28) {
                const loc = Math.imul(vector, 28) + state;
                offset += this.unpack(this.offsetIncrs3, loc, 2);
                state = this.unpack(this.toStates3, loc, 5) - 1;
            }
        } else if (position === this.w - 4) {
            if (state < 45) {
                const loc = Math.imul(vector, 45) + state;
                offset += this.unpack(this.offsetIncrs4, loc, 3);
                state = this.unpack(this.toStates4, loc, 6) - 1;
            }
        } else {
            // eslint-disable-next-line no-lonely-if
            if (state < 45) {
                const loc = Math.imul(vector, 45) + state;
                offset += this.unpack(this.offsetIncrs5, loc, 3);
                state = this.unpack(this.toStates5, loc, 6) - 1;
            }
        }

        if (state === -1) {
            // null state
            return -1;
        } else {
            // translate back to abs
            return Math.imul(state, this.w + 1) + offset;
        }
    }

    // state map
    // 0 -> [(0, 0)]
    // 1 -> [(0, 1)]
    // 2 -> [(0, 2)]
    // 3 -> [(0, 1), (1, 1)]
    // 4 -> [(0, 2), (1, 2)]
    // 5 -> [(0, 1), (1, 1), (2, 1)]
    // 6 -> [(0, 2), (1, 2), (2, 2)]
    // 7 -> [(0, 1), (2, 1)]
    // 8 -> [(0, 1), (2, 2)]
    // 9 -> [(0, 2), (2, 1)]
    // 10 -> [(0, 2), (2, 2)]
    // 11 -> [t(0, 1), (0, 1), (1, 1), (2, 1)]
    // 12 -> [t(0, 2), (0, 2), (1, 2), (2, 2)]
    // 13 -> [(0, 2), (1, 2), (2, 2), (3, 2)]
    // 14 -> [(0, 1), (1, 1), (3, 2)]
    // 15 -> [(0, 1), (2, 2), (3, 2)]
    // 16 -> [(0, 1), (3, 2)]
    // 17 -> [(0, 1), t(1, 2), (2, 2), (3, 2)]
    // 18 -> [(0, 2), (1, 2), (3, 1)]
    // 19 -> [(0, 2), (1, 2), (3, 2)]
    // 20 -> [(0, 2), (1, 2), t(1, 2), (2, 2), (3, 2)]
    // 21 -> [(0, 2), (2, 1), (3, 1)]
    // 22 -> [(0, 2), (2, 2), (3, 2)]
    // 23 -> [(0, 2), (3, 1)]
    // 24 -> [(0, 2), (3, 2)]
    // 25 -> [(0, 2), t(1, 2), (1, 2), (2, 2), (3, 2)]
    // 26 -> [t(0, 2), (0, 2), (1, 2), (2, 2), (3, 2)]
    // 27 -> [t(0, 2), (0, 2), (1, 2), (3, 1)]
    // 28 -> [(0, 2), (1, 2), (2, 2), (3, 2), (4, 2)]
    // 29 -> [(0, 2), (1, 2), (2, 2), (4, 2)]
    // 30 -> [(0, 2), (1, 2), (2, 2), t(2, 2), (3, 2), (4, 2)]
    // 31 -> [(0, 2), (1, 2), (3, 2), (4, 2)]
    // 32 -> [(0, 2), (1, 2), (4, 2)]
    // 33 -> [(0, 2), (1, 2), t(1, 2), (2, 2), (3, 2), (4, 2)]
    // 34 -> [(0, 2), (1, 2), t(2, 2), (2, 2), (3, 2), (4, 2)]
    // 35 -> [(0, 2), (2, 1), (4, 2)]
    // 36 -> [(0, 2), (2, 2), (3, 2), (4, 2)]
    // 37 -> [(0, 2), (2, 2), (4, 2)]
    // 38 -> [(0, 2), (3, 2), (4, 2)]
    // 39 -> [(0, 2), (4, 2)]
    // 40 -> [(0, 2), t(1, 2), (1, 2), (2, 2), (3, 2), (4, 2)]
    // 41 -> [(0, 2), t(2, 2), (2, 2), (3, 2), (4, 2)]
    // 42 -> [t(0, 2), (0, 2), (1, 2), (2, 2), (3, 2), (4, 2)]
    // 43 -> [t(0, 2), (0, 2), (1, 2), (2, 2), (4, 2)]
    // 44 -> [t(0, 2), (0, 2), (1, 2), (2, 2), t(2, 2), (3, 2), (4, 2)]


    /** @param {number} w - length of word being checked */
    constructor(w) {
        // n = 2: accepts words within edit distance 2. The minErrors array
        // (one entry per parametric state in the map above) comes from the
        // moman/finenight generator; negative entries flip isAccept's
        // threshold for states that already carry errors.
        super(w, 2, new Int32Array([
            0,1,2,0,1,-1,0,-1,0,-1,0,-1,0,-1,-1,-1,-1,-1,-2,-1,-1,-2,-1,-2,
            -1,-1,-1,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,-2,
        ]));
    }
}
|
||||
|
||||
// Packed transition tables for Lev2TParametricDescription, produced by the
// moman/finenight generator (see the notice above); do not edit by hand.
// `toStatesN` maps a packed (vector, parametricState) index to the successor
// parametric state (stored +1, with 0 decoding to the null state), and
// `offsetIncrsN` holds the matching window-offset increments. Entries are
// bit-packed into Int32Arrays and read back via
// `ParametricDescription.prototype.unpack`.
Lev2TParametricDescription.prototype.toStates0 = /*2 bits per value */ new Int32Array([
    0xe,
]);
Lev2TParametricDescription.prototype.offsetIncrs0 = /*1 bits per value */ new Int32Array([
    0x0,
]);

Lev2TParametricDescription.prototype.toStates1 = /*3 bits per value */ new Int32Array([
    0x1a688a2c,
]);
Lev2TParametricDescription.prototype.offsetIncrs1 = /*1 bits per value */ new Int32Array([
    0x3e0,
]);

Lev2TParametricDescription.prototype.toStates2 = /*4 bits per value */ new Int32Array([
    0x70707054,0xdc07035,0x3dd3a3a,0x2323213a,
    0x15435223,0x22545432,0x5435,
]);
Lev2TParametricDescription.prototype.offsetIncrs2 = /*2 bits per value */ new Int32Array([
    0x80000,0x55582088,0x55555555,0x55,
]);

Lev2TParametricDescription.prototype.toStates3 = /*5 bits per value */ new Int32Array([
    0x1c0380a4,0x700a570,0xca529c0,0x180a00,
    0xa80af180,0xc5498e60,0x5a546398,0x8c4300e8,
    0xac18c601,0xd8d43501,0x863500ad,0x51976d6a,
    0x8ca0180a,0xc3501ac2,0xb0c5be16,0x76dda8a5,
    0x18c4519,0xc41294a,0xe248d231,0x1086520c,
    0xce31ac42,0x13946358,0x2d0348c4,0x6732d494,
    0x1ad224a5,0xd635ad4b,0x520c4139,0xce24948,
    0x22110a52,0x58ce729d,0xc41394e3,0x941cc520,
    0x90e732d4,0x4729d224,0x39ce35ad,
]);
Lev2TParametricDescription.prototype.offsetIncrs3 = /*2 bits per value */ new Int32Array([
    0x80000,0xc0c830,0x300f3c30,0x2200fcff,
    0xcaa00a08,0x3c2200a8,0xa8fea00a,0x55555555,
    0x55555555,0x55555555,0x55555555,0x55555555,
    0x55555555,0x55555555,
]);

Lev2TParametricDescription.prototype.toStates4 = /*6 bits per value */ new Int32Array([
    0x801c0144,0x1453803,0x14700038,0xc0005145,
    0x1401,0x14,0x140000,0x0,
    0x510000,0x6301f007,0x301f00d1,0xa186178,
    0xc20ca0c3,0xc20c30,0xc30030c,0xc00c00cd,
    0xf0c00c30,0x4c054014,0xc30944c3,0x55150c34,
    0x8300550,0x430c0143,0x50c31,0xc30850c,
    0xc3143000,0x50053c50,0x5130d301,0x850d30c2,
    0x30a08608,0xc214414,0x43142145,0x21450031,
    0x1400c314,0x4c143145,0x32832803,0x28014d6c,
    0xcd34a0c3,0x1c50c76,0x1c314014,0x430c30c3,
    0x1431,0xc300500,0xca00d303,0xd36d0e40,
    0x90b0e400,0xcb2abb2c,0x70c20ca1,0x2c32ca2c,
    0xcd2c70cb,0x31c00c00,0x34c2c32c,0x5583280,
    0x558309b7,0x6cd6ca14,0x430850c7,0x51c51401,
    0x1430c714,0xc3087,0x71451450,0xca00d30,
    0xc26dc156,0xb9071560,0x1cb2abb2,0xc70c2144,
    0xb1c51ca1,0x1421c70c,0xc51c00c3,0x30811c51,
    0x24324308,0xc51031c2,0x70820820,0x5c33830d,
    0xc33850c3,0x30c30c30,0xc30c31c,0x451450c3,
    0x20c20c20,0xda0920d,0x5145914f,0x36596114,
    0x51965865,0xd9643653,0x365a6590,0x51964364,
    0x43081505,0x920b2032,0x2c718b28,0xd7242249,
    0x35cb28b0,0x2cb3872c,0x972c30d7,0xb0c32cb2,
    0x4e1c75c,0xc80c90c2,0x62ca2482,0x4504171c,
    0xd65d9610,0x33976585,0xd95cb5d,0x4b5ca5d7,
    0x73975c36,0x10308138,0xc2245105,0x41451031,
    0x14e24208,0xc35c3387,0x51453851,0x1c51c514,
    0xc70c30c3,0x20451450,0x14f1440c,0x4f0da092,
    0x4513d41,0x6533944d,0x1350e658,0xe1545055,
    0x64365a50,0x5519383,0x51030815,0x28920718,
    0x441c718b,0x714e2422,0x1c35cb28,0x4e1c7387,
    0xb28e1c51,0x5c70c32c,0xc204e1c7,0x81c61440,
    0x1c62ca24,0xd04503ce,0x85d63944,0x39338e65,
    0x8e154387,0x364b5ca3,0x38739738,
]);
Lev2TParametricDescription.prototype.offsetIncrs4 = /*3 bits per value */ new Int32Array([
    0x10000000,0xc00000,0x60061,0x400,
    0x0,0x80010008,0x249248a4,0x8229048,
    0x2092,0x6c3603,0xb61b6c30,0x6db6036d,
    0xdb6c0,0x361b0180,0x91b72000,0xdb11b71b,
    0x6db6236,0x1008200,0x12480012,0x24924906,
    0x48200049,0x80410002,0x24000900,0x4924a489,
    0x10822492,0x20800125,0x48360,0x9241b692,
    0x6da4924,0x40009268,0x241b010,0x291b4900,
    0x6d249249,0x49493423,0x92492492,0x24924924,
    0x49249249,0x92492492,0x24924924,0x49249249,
    0x92492492,0x24924924,0x49249249,0x92492492,
    0x24924924,0x49249249,0x92492492,0x24924924,
    0x49249249,0x92492492,0x24924924,0x49249249,
    0x92492492,0x24924924,0x49249249,0x92492492,
    0x24924924,0x49249249,0x92492492,0x24924924,
    0x49249249,0x92492492,0x24924924,0x49249249,
    0x92492492,0x24924924,0x49249249,0x2492,
]);

Lev2TParametricDescription.prototype.toStates5 = /*6 bits per value */ new Int32Array([
    0x801c0144,0x1453803,0x14700038,0xc0005145,
    0x1401,0x14,0x140000,0x0,
    0x510000,0x4e00e007,0xe0051,0x3451451c,
    0xd015000,0x30cd0000,0xc30c30c,0xc30c30d4,
    0x40c30c30,0x7c01c014,0xc03458c0,0x185e0c07,
    0x2830c286,0x830c3083,0xc30030,0x33430c,
    0x30c3003,0x70051030,0x16301f00,0x8301f00d,
    0x30a18617,0xc20ca0c,0x431420c3,0xb1450c51,
    0x14314315,0x4f143145,0x34c05401,0x4c30944c,
    0x55150c3,0x30830055,0x1430c014,0xc00050c3,
    0xc30850,0xc314300,0x150053c5,0x25130d30,
    0x5430d30c,0xc0354154,0x300d0c90,0x1cb2cd0c,
    0xc91cb0c3,0x72c30cb2,0x14f1cb2c,0xc34c0540,
    0x34c30944,0x82182214,0x851050c2,0x50851430,
    0x1400c50c,0x30c5085,0x50c51450,0x150053c,
    0xc25130d3,0x8850d30,0x1430a086,0x450c2144,
    0x51cb1c21,0x1c91c70c,0xc71c314b,0x34c1cb1,
    0x6c328328,0xc328014d,0x76cd34a0,0x1401c50c,
    0xc31c3140,0x31430c30,0x14,0x30c3005,
    0xa0ca00d3,0x535b0c,0x4d2830ca,0x514369b3,
    0xc500d01,0x5965965a,0x30d46546,0x6435030c,
    0x8034c659,0xdb439032,0x2c390034,0xcaaecb24,
    0x30832872,0xcb28b1c,0x4b1c32cb,0x70030033,
    0x30b0cb0c,0xe40ca00d,0x400d36d0,0xb2c90b0e,
    0xca1cb2ab,0xa2c70c20,0x6575d95c,0x4315b5ce,
    0x95c53831,0x28034c5d,0x9b705583,0xa1455830,
    0xc76cd6c,0x40143085,0x71451c51,0x871430c,
    0x450000c3,0xd3071451,0x1560ca00,0x560c26dc,
    0xb35b2851,0xc914369,0x1a14500d,0x46593945,
    0xcb2c939,0x94507503,0x328034c3,0x9b70558,
    0xe41c5583,0x72caaeca,0x1c308510,0xc7147287,
    0x50871c32,0x1470030c,0xd307147,0xc1560ca0,
    0x1560c26d,0xabb2b907,0x21441cb2,0x38a1c70c,
    0x8e657394,0x314b1c93,0x39438738,0x43083081,
    0x31c22432,0x820c510,0x830d7082,0x50c35c33,
    0xc30c338,0xc31c30c3,0x50c30c30,0xc204514,
    0x890c90c2,0x31440c70,0xa8208208,0xea0df0c3,
    0x8a231430,0xa28a28a2,0x28a28a1e,0x1861868a,
    0x48308308,0xc3682483,0x14516453,0x4d965845,
    0xd4659619,0x36590d94,0xd969964,0x546590d9,
    0x20c20541,0x920d20c,0x5914f0da,0x96114514,
    0x65865365,0xe89d3519,0x99e7a279,0x9e89e89e,
    0x81821827,0xb2032430,0x18b28920,0x422492c7,
    0xb28b0d72,0x3872c35c,0xc30d72cb,0x32cb2972,
    0x1c75cb0c,0xc90c204e,0xa2482c80,0x24b1c62c,
    0xc3a89089,0xb0ea2e42,0x9669a31c,0xa4966a28,
    0x59a8a269,0x8175e7a,0xb203243,0x718b2892,
    0x4114105c,0x17597658,0x74ce5d96,0x5c36572d,
    0xd92d7297,0xe1ce5d70,0xc90c204,0xca2482c8,
    0x4171c62,0x5d961045,0x976585d6,0x79669533,
    0x964965a2,0x659689e6,0x308175e7,0x24510510,
    0x451031c2,0xe2420841,0x5c338714,0x453851c3,
    0x51c51451,0xc30c31c,0x451450c7,0x41440c20,
    0xc708914,0x82105144,0xf1c58c90,0x1470ea0d,
    0x61861863,0x8a1e85e8,0x8687a8a2,0x3081861,
    0x24853c51,0x5053c368,0x1341144f,0x96194ce5,
    0x1544d439,0x94385514,0xe0d90d96,0x5415464,
    0x4f1440c2,0xf0da0921,0x4513d414,0x533944d0,
    0x350e6586,0x86082181,0xe89e981d,0x18277689,
    0x10308182,0x89207185,0x41c718b2,0x14e24224,
    0xc35cb287,0xe1c73871,0x28e1c514,0xc70c32cb,
    0x204e1c75,0x1c61440c,0xc62ca248,0x90891071,
    0x2e41c58c,0xa31c70ea,0xe86175e7,0xa269a475,
    0x5e7a57a8,0x51030817,0x28920718,0xf38718b,
    0xe5134114,0x39961758,0xe1ce4ce,0x728e3855,
    0x5ce0d92d,0xc204e1ce,0x81c61440,0x1c62ca24,
    0xd04503ce,0x85d63944,0x75338e65,0x5d86075e,
    0x89e69647,0x75e76576,
]);
Lev2TParametricDescription.prototype.offsetIncrs5 = /*3 bits per value */ new Int32Array([
    0x10000000,0xc00000,0x60061,0x400,
    0x0,0x60000008,0x6b003080,0xdb6ab6db,
    0x2db6,0x800400,0x49245240,0x11482412,
    0x104904,0x40020000,0x92292000,0xa4b25924,
    0x9649658,0xd80c000,0xdb0c001b,0x80db6d86,
    0x6db01b6d,0xc0600003,0x86000d86,0x6db6c36d,
    0xddadb6ed,0x300001b6,0x6c360,0xe37236e4,
    0x46db6236,0xdb6c,0x361b018,0xb91b7200,
    0x6dbb1b71,0x6db763,0x20100820,0x61248001,
    0x92492490,0x24820004,0x8041000,0x92400090,
    0x24924830,0x555b6a49,0x2080012,0x20004804,
    0x49252449,0x84112492,0x4000928,0x240201,
    0x92922490,0x58924924,0x49456,0x120d8082,
    0x6da4800,0x69249249,0x249a01b,0x6c04100,
    0x6d240009,0x92492483,0x24d5adb4,0x60208001,
    0x92000483,0x24925236,0x6846da49,0x10400092,
    0x241b0,0x49291b49,0x636d2492,0x92494935,
    0x24924924,0x49249249,0x92492492,0x24924924,
    0x49249249,0x92492492,0x24924924,0x49249249,
    0x92492492,0x24924924,0x49249249,0x92492492,
    0x24924924,0x49249249,0x92492492,0x24924924,
    0x49249249,0x92492492,0x24924924,0x49249249,
    0x92492492,0x24924924,0x49249249,0x92492492,
    0x24924924,0x49249249,0x92492492,0x24924924,
    0x49249249,0x92492492,0x24924924,0x49249249,
    0x92492492,0x24924924,0x49249249,0x92492492,
    0x24924924,0x49249249,0x92492492,0x24924924,
    0x49249249,0x92492492,0x24924924,0x49249249,
    0x92492492,0x24924924,0x49249249,0x92492492,
    0x24924924,0x49249249,0x92492492,0x24924924,
    0x49249249,0x92492492,0x24924924,0x49249249,
    0x92492492,0x24924924,0x49249249,0x92492492,
    0x24924924,0x49249249,0x92492492,0x24924924,
    0x49249249,0x92492492,0x24924924,
]);
|
||||
|
||||
class Lev1TParametricDescription extends ParametricDescription {
    /**
     * Compute the successor of `absState` after consuming a character whose
     * characteristic bit-vector (relative to the state's window) is
     * `vector`. Returns -1 for the null (sink) state.
     * @param {number} absState - packed state: parametricState * (w + 1) + offset
     * @param {number} position - the state's position within the word; `this.w` means the end
     * @param {number} vector
     * @returns {number}
     */
    transition(absState, position, vector) {
        // split the absolute state into its parametric state and word offset
        let state = Math.floor(absState / (this.w + 1));
        let offset = absState % (this.w + 1);

        // Pick the generated table for the current distance from the end of
        // the word: near the end fewer parametric states are reachable
        // (2, 3, then 6), so the tables shrink and pack entries into fewer
        // bits. The table index is (vector * stateCount) + state;
        // `toStates*` entries are stored +1, so 0 decodes to the null
        // state (-1) below.
        if (position === this.w) {
            if (state < 2) {
                const loc = Math.imul(vector, 2) + state;
                offset += this.unpack(this.offsetIncrs0, loc, 1);
                state = this.unpack(this.toStates0, loc, 2) - 1;
            }
        } else if (position === this.w - 1) {
            if (state < 3) {
                const loc = Math.imul(vector, 3) + state;
                offset += this.unpack(this.offsetIncrs1, loc, 1);
                state = this.unpack(this.toStates1, loc, 2) - 1;
            }
        } else if (position === this.w - 2) {
            if (state < 6) {
                const loc = Math.imul(vector, 6) + state;
                offset += this.unpack(this.offsetIncrs2, loc, 2);
                state = this.unpack(this.toStates2, loc, 3) - 1;
            }
        } else {
            // eslint-disable-next-line no-lonely-if
            if (state < 6) {
                const loc = Math.imul(vector, 6) + state;
                offset += this.unpack(this.offsetIncrs3, loc, 2);
                state = this.unpack(this.toStates3, loc, 3) - 1;
            }
        }

        if (state === -1) {
            // null state
            return -1;
        } else {
            // translate back to abs
            return Math.imul(state, this.w + 1) + offset;
        }
    }

    // state map
    // 0 -> [(0, 0)]
    // 1 -> [(0, 1)]
    // 2 -> [(0, 1), (1, 1)]
    // 3 -> [(0, 1), (1, 1), (2, 1)]
    // 4 -> [(0, 1), (2, 1)]
    // 5 -> [t(0, 1), (0, 1), (1, 1), (2, 1)]


    /** @param {number} w - length of word being checked */
    constructor(w) {
        // n = 1: accepts words within edit distance 1. The minErrors array
        // has one entry per parametric state in the map above.
        super(w, 1, new Int32Array([0,1,0,-1,-1,-1]));
    }
}
|
||||
|
||||
// Packed transition tables for Lev1TParametricDescription, produced by the
// moman/finenight generator; do not edit by hand. Layout matches the
// distance-2 tables above: `toStatesN` holds successor states (stored +1,
// with 0 decoding to the null state) and `offsetIncrsN` the matching
// window-offset increments, all read via
// `ParametricDescription.prototype.unpack`.
Lev1TParametricDescription.prototype.toStates0 = /*2 bits per value */ new Int32Array([
    0x2,
]);
Lev1TParametricDescription.prototype.offsetIncrs0 = /*1 bits per value */ new Int32Array([
    0x0,
]);

Lev1TParametricDescription.prototype.toStates1 = /*2 bits per value */ new Int32Array([
    0xa43,
]);
Lev1TParametricDescription.prototype.offsetIncrs1 = /*1 bits per value */ new Int32Array([
    0x38,
]);

Lev1TParametricDescription.prototype.toStates2 = /*3 bits per value */ new Int32Array([
    0x12180003,0xb45a4914,0x69,
]);
Lev1TParametricDescription.prototype.offsetIncrs2 = /*2 bits per value */ new Int32Array([
    0x558a0000,0x5555,
]);

Lev1TParametricDescription.prototype.toStates3 = /*3 bits per value */ new Int32Array([
    0x900c0003,0xa1904864,0x45a49169,0x5a6d196a,
    0x9634,
]);
Lev1TParametricDescription.prototype.offsetIncrs3 = /*2 bits per value */ new Int32Array([
    0xa0fc0000,0x5555ba08,0x55555555,
]);
|
||||
@@ -10,6 +10,6 @@
|
||||
"skipLibCheck": true
|
||||
},
|
||||
"typeAcquisition": {
|
||||
"include": ["./rustdoc.d.ts"]
|
||||
"include": ["./rustdoc.d.ts", "./stringdex.d.ts"]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -80,6 +80,7 @@ pub(crate) fn for_each<E>(f: impl Fn(&StaticFile) -> Result<(), E>) -> Result<()
|
||||
normalize_css => "static/css/normalize.css",
|
||||
main_js => "static/js/main.js",
|
||||
search_js => "static/js/search.js",
|
||||
stringdex_js => "static/js/stringdex.js",
|
||||
settings_js => "static/js/settings.js",
|
||||
src_script_js => "static/js/src-script.js",
|
||||
storage_js => "static/js/storage.js",
|
||||
|
||||
@@ -29,6 +29,7 @@
|
||||
data-rustdoc-version="{{rustdoc_version}}" {#+ #}
|
||||
data-channel="{{rust_channel}}" {#+ #}
|
||||
data-search-js="{{files.search_js}}" {#+ #}
|
||||
data-stringdex-js="{{files.stringdex_js}}" {#+ #}
|
||||
data-settings-js="{{files.settings_js}}" {#+ #}
|
||||
> {# #}
|
||||
<script src="{{static_root_path|safe}}{{files.storage_js}}"></script>
|
||||
@@ -72,18 +73,9 @@
|
||||
<![endif]-->
|
||||
{{ layout.external_html.before_content|safe }}
|
||||
{% if page.css_class != "src" %}
|
||||
<nav class="mobile-topbar"> {# #}
|
||||
<button class="sidebar-menu-toggle" title="show sidebar"></button>
|
||||
{% if !layout.logo.is_empty() || page.rust_logo %}
|
||||
<a class="logo-container" href="{{page.root_path|safe}}{{display_krate_with_trailing_slash|safe}}index.html">
|
||||
{% if page.rust_logo %}
|
||||
<img class="rust-logo" src="{{static_root_path|safe}}{{files.rust_logo_svg}}" alt="">
|
||||
{% else if !layout.logo.is_empty() %}
|
||||
<img src="{{layout.logo}}" alt="">
|
||||
{% endif %}
|
||||
</a>
|
||||
{% endif %}
|
||||
</nav>
|
||||
<rustdoc-topbar> {# #}
|
||||
<h2><a href="#">{{page.short_title}}</a></h2> {# #}
|
||||
</rustdoc-topbar>
|
||||
{% endif %}
|
||||
<nav class="sidebar">
|
||||
{% if page.css_class != "src" %}
|
||||
@@ -117,9 +109,6 @@
|
||||
<div class="sidebar-resizer" title="Drag to resize sidebar"></div> {# #}
|
||||
<main>
|
||||
{% if page.css_class != "src" %}<div class="width-limiter">{% endif %}
|
||||
{# defined in storage.js to avoid duplicating complex UI across every page #}
|
||||
{# and because the search form only works if JS is enabled anyway #}
|
||||
<rustdoc-search></rustdoc-search> {# #}
|
||||
<section id="main-content" class="content">{{ content|safe }}</section>
|
||||
{% if page.css_class != "src" %}</div>{% endif %}
|
||||
</main>
|
||||
|
||||
@@ -12,8 +12,8 @@
|
||||
<h1>
|
||||
{{typ}}
|
||||
<span{% if item_type != "mod" +%} class="{{item_type}}"{% endif %}>
|
||||
{{name}}
|
||||
</span> {# #}
|
||||
{{name|wrapped|safe}}
|
||||
</span> {# #}
|
||||
<button id="copy-path" title="Copy item path to clipboard"> {# #}
|
||||
Copy item path {# #}
|
||||
</button> {# #}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
/* global globalThis */
|
||||
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
|
||||
const { isGeneratorObject } = require("util/types");
|
||||
|
||||
function arrayToCode(array) {
|
||||
return array.map((value, index) => {
|
||||
@@ -45,23 +46,16 @@ function shouldIgnoreField(fieldName) {
|
||||
}
|
||||
|
||||
function valueMapper(key, testOutput) {
|
||||
const isAlias = testOutput["is_alias"];
|
||||
let value = testOutput[key];
|
||||
// To make our life easier, if there is a "parent" type, we add it to the path.
|
||||
if (key === "path") {
|
||||
if (testOutput["parent"] !== undefined) {
|
||||
if (testOutput["parent"]) {
|
||||
if (value.length > 0) {
|
||||
value += "::" + testOutput["parent"]["name"];
|
||||
} else {
|
||||
value = testOutput["parent"]["name"];
|
||||
}
|
||||
} else if (testOutput["is_alias"]) {
|
||||
value = valueMapper(key, testOutput["original"]);
|
||||
}
|
||||
} else if (isAlias && key === "alias") {
|
||||
value = testOutput["name"];
|
||||
} else if (isAlias && ["name"].includes(key)) {
|
||||
value = testOutput["original"][key];
|
||||
}
|
||||
return value;
|
||||
}
|
||||
@@ -237,7 +231,7 @@ async function runSearch(query, expected, doSearch, loadedFile, queryName) {
|
||||
const ignore_order = loadedFile.ignore_order;
|
||||
const exact_check = loadedFile.exact_check;
|
||||
|
||||
const results = await doSearch(query, loadedFile.FILTER_CRATE);
|
||||
const { resultsTable } = await doSearch(query, loadedFile.FILTER_CRATE);
|
||||
const error_text = [];
|
||||
|
||||
for (const key in expected) {
|
||||
@@ -247,37 +241,38 @@ async function runSearch(query, expected, doSearch, loadedFile, queryName) {
|
||||
if (!Object.prototype.hasOwnProperty.call(expected, key)) {
|
||||
continue;
|
||||
}
|
||||
if (!Object.prototype.hasOwnProperty.call(results, key)) {
|
||||
if (!Object.prototype.hasOwnProperty.call(resultsTable, key)) {
|
||||
error_text.push("==> Unknown key \"" + key + "\"");
|
||||
break;
|
||||
}
|
||||
const entry = expected[key];
|
||||
|
||||
if (exact_check && entry.length !== results[key].length) {
|
||||
if (exact_check && entry.length !== resultsTable[key].length) {
|
||||
error_text.push(queryName + "==> Expected exactly " + entry.length +
|
||||
" results but found " + results[key].length + " in '" + key + "'");
|
||||
" results but found " + resultsTable[key].length + " in '" + key + "'");
|
||||
}
|
||||
|
||||
let prev_pos = -1;
|
||||
for (const [index, elem] of entry.entries()) {
|
||||
const entry_pos = lookForEntry(elem, results[key]);
|
||||
const entry_pos = lookForEntry(elem, resultsTable[key]);
|
||||
if (entry_pos === -1) {
|
||||
error_text.push(queryName + "==> Result not found in '" + key + "': '" +
|
||||
JSON.stringify(elem) + "'");
|
||||
// By default, we just compare the two first items.
|
||||
let item_to_diff = 0;
|
||||
if ((!ignore_order || exact_check) && index < results[key].length) {
|
||||
if ((!ignore_order || exact_check) && index < resultsTable[key].length) {
|
||||
item_to_diff = index;
|
||||
}
|
||||
error_text.push("Diff of first error:\n" +
|
||||
betterLookingDiff(elem, results[key][item_to_diff]));
|
||||
betterLookingDiff(elem, resultsTable[key][item_to_diff]));
|
||||
} else if (exact_check === true && prev_pos + 1 !== entry_pos) {
|
||||
error_text.push(queryName + "==> Exact check failed at position " + (prev_pos + 1) +
|
||||
": expected '" + JSON.stringify(elem) + "' but found '" +
|
||||
JSON.stringify(results[key][index]) + "'");
|
||||
JSON.stringify(resultsTable[key][index]) + "'");
|
||||
} else if (ignore_order === false && entry_pos < prev_pos) {
|
||||
error_text.push(queryName + "==> '" + JSON.stringify(elem) + "' was supposed " +
|
||||
"to be before '" + JSON.stringify(results[key][prev_pos]) + "'");
|
||||
error_text.push(queryName + "==> '" +
|
||||
JSON.stringify(elem) + "' was supposed to be before '" +
|
||||
JSON.stringify(resultsTable[key][prev_pos]) + "'");
|
||||
} else {
|
||||
prev_pos = entry_pos;
|
||||
}
|
||||
@@ -286,19 +281,20 @@ async function runSearch(query, expected, doSearch, loadedFile, queryName) {
|
||||
return error_text;
|
||||
}
|
||||
|
||||
async function runCorrections(query, corrections, getCorrections, loadedFile) {
|
||||
const qc = await getCorrections(query, loadedFile.FILTER_CRATE);
|
||||
async function runCorrections(query, corrections, doSearch, loadedFile) {
|
||||
const { parsedQuery } = await doSearch(query, loadedFile.FILTER_CRATE);
|
||||
const qc = parsedQuery.correction;
|
||||
const error_text = [];
|
||||
|
||||
if (corrections === null) {
|
||||
if (qc !== null) {
|
||||
error_text.push(`==> expected = null, found = ${qc}`);
|
||||
error_text.push(`==> [correction] expected = null, found = ${qc}`);
|
||||
}
|
||||
return error_text;
|
||||
}
|
||||
|
||||
if (qc !== corrections.toLowerCase()) {
|
||||
error_text.push(`==> expected = ${corrections}, found = ${qc}`);
|
||||
if (qc.toLowerCase() !== corrections.toLowerCase()) {
|
||||
error_text.push(`==> [correction] expected = ${corrections}, found = ${qc}`);
|
||||
}
|
||||
|
||||
return error_text;
|
||||
@@ -320,7 +316,7 @@ function checkResult(error_text, loadedFile, displaySuccess) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
async function runCheckInner(callback, loadedFile, entry, getCorrections, extra) {
|
||||
async function runCheckInner(callback, loadedFile, entry, extra, doSearch) {
|
||||
if (typeof entry.query !== "string") {
|
||||
console.log("FAILED");
|
||||
console.log("==> Missing `query` field");
|
||||
@@ -338,7 +334,7 @@ async function runCheckInner(callback, loadedFile, entry, getCorrections, extra)
|
||||
error_text = await runCorrections(
|
||||
entry.query,
|
||||
entry.correction,
|
||||
getCorrections,
|
||||
doSearch,
|
||||
loadedFile,
|
||||
);
|
||||
if (checkResult(error_text, loadedFile, false) !== 0) {
|
||||
@@ -348,16 +344,16 @@ async function runCheckInner(callback, loadedFile, entry, getCorrections, extra)
|
||||
return true;
|
||||
}
|
||||
|
||||
async function runCheck(loadedFile, key, getCorrections, callback) {
|
||||
async function runCheck(loadedFile, key, doSearch, callback) {
|
||||
const expected = loadedFile[key];
|
||||
|
||||
if (Array.isArray(expected)) {
|
||||
for (const entry of expected) {
|
||||
if (!await runCheckInner(callback, loadedFile, entry, getCorrections, true)) {
|
||||
if (!await runCheckInner(callback, loadedFile, entry, true, doSearch)) {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
} else if (!await runCheckInner(callback, loadedFile, expected, getCorrections, false)) {
|
||||
} else if (!await runCheckInner(callback, loadedFile, expected, false, doSearch)) {
|
||||
return 1;
|
||||
}
|
||||
console.log("OK");
|
||||
@@ -368,7 +364,7 @@ function hasCheck(content, checkName) {
|
||||
return content.startsWith(`const ${checkName}`) || content.includes(`\nconst ${checkName}`);
|
||||
}
|
||||
|
||||
async function runChecks(testFile, doSearch, parseQuery, getCorrections) {
|
||||
async function runChecks(testFile, doSearch, parseQuery) {
|
||||
let checkExpected = false;
|
||||
let checkParsed = false;
|
||||
let testFileContent = readFile(testFile);
|
||||
@@ -397,12 +393,12 @@ async function runChecks(testFile, doSearch, parseQuery, getCorrections) {
|
||||
let res = 0;
|
||||
|
||||
if (checkExpected) {
|
||||
res += await runCheck(loadedFile, "EXPECTED", getCorrections, (query, expected, text) => {
|
||||
res += await runCheck(loadedFile, "EXPECTED", doSearch, (query, expected, text) => {
|
||||
return runSearch(query, expected, doSearch, loadedFile, text);
|
||||
});
|
||||
}
|
||||
if (checkParsed) {
|
||||
res += await runCheck(loadedFile, "PARSED", getCorrections, (query, expected, text) => {
|
||||
res += await runCheck(loadedFile, "PARSED", doSearch, (query, expected, text) => {
|
||||
return runParser(query, expected, parseQuery, text);
|
||||
});
|
||||
}
|
||||
@@ -416,71 +412,89 @@ async function runChecks(testFile, doSearch, parseQuery, getCorrections) {
|
||||
* @param {string} resource_suffix - Version number between filename and .js, e.g. "1.59.0"
|
||||
* @returns {Object} - Object containing keys: `doSearch`, which runs a search
|
||||
* with the loaded index and returns a table of results; `parseQuery`, which is the
|
||||
* `parseQuery` function exported from the search module; and `getCorrections`, which runs
|
||||
* `parseQuery` function exported from the search module, which runs
|
||||
* a search but returns type name corrections instead of results.
|
||||
*/
|
||||
function loadSearchJS(doc_folder, resource_suffix) {
|
||||
const searchIndexJs = path.join(doc_folder, "search-index" + resource_suffix + ".js");
|
||||
const searchIndex = require(searchIndexJs);
|
||||
|
||||
globalThis.searchState = {
|
||||
descShards: new Map(),
|
||||
loadDesc: async function({descShard, descIndex}) {
|
||||
if (descShard.promise === null) {
|
||||
descShard.promise = new Promise((resolve, reject) => {
|
||||
descShard.resolve = resolve;
|
||||
const ds = descShard;
|
||||
const fname = `${ds.crate}-desc-${ds.shard}-${resource_suffix}.js`;
|
||||
fs.readFile(
|
||||
`${doc_folder}/search.desc/${descShard.crate}/${fname}`,
|
||||
(err, data) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
eval(data.toString("utf8"));
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
const list = await descShard.promise;
|
||||
return list[descIndex];
|
||||
},
|
||||
loadedDescShard: function(crate, shard, data) {
|
||||
this.descShards.get(crate)[shard].resolve(data.split("\n"));
|
||||
},
|
||||
};
|
||||
|
||||
async function loadSearchJS(doc_folder, resource_suffix) {
|
||||
const staticFiles = path.join(doc_folder, "static.files");
|
||||
const stringdexJs = fs.readdirSync(staticFiles).find(f => f.match(/stringdex.*\.js$/));
|
||||
const stringdexModule = require(path.join(staticFiles, stringdexJs));
|
||||
const searchJs = fs.readdirSync(staticFiles).find(f => f.match(/search.*\.js$/));
|
||||
const searchModule = require(path.join(staticFiles, searchJs));
|
||||
searchModule.initSearch(searchIndex.searchIndex);
|
||||
const docSearch = searchModule.docSearch;
|
||||
globalThis.nonnull = (x, msg) => {
|
||||
if (x === null) {
|
||||
throw (msg || "unexpected null value!");
|
||||
} else {
|
||||
return x;
|
||||
}
|
||||
};
|
||||
const { docSearch, DocSearch } = await searchModule.initSearch(
|
||||
stringdexModule.Stringdex,
|
||||
stringdexModule.RoaringBitmap,
|
||||
{
|
||||
loadRoot: callbacks => {
|
||||
for (const key in callbacks) {
|
||||
if (Object.hasOwn(callbacks, key)) {
|
||||
globalThis[key] = callbacks[key];
|
||||
}
|
||||
}
|
||||
const rootJs = readFile(path.join(doc_folder, "search.index/root" +
|
||||
resource_suffix + ".js"));
|
||||
eval(rootJs);
|
||||
},
|
||||
loadTreeByHash: hashHex => {
|
||||
const shardJs = readFile(path.join(doc_folder, "search.index/" + hashHex + ".js"));
|
||||
eval(shardJs);
|
||||
},
|
||||
loadDataByNameAndHash: (name, hashHex) => {
|
||||
const shardJs = readFile(path.join(doc_folder, "search.index/" + name + "/" +
|
||||
hashHex + ".js"));
|
||||
eval(shardJs);
|
||||
},
|
||||
},
|
||||
);
|
||||
return {
|
||||
doSearch: async function(queryStr, filterCrate, currentCrate) {
|
||||
const result = await docSearch.execQuery(searchModule.parseQuery(queryStr),
|
||||
filterCrate, currentCrate);
|
||||
const parsedQuery = DocSearch.parseQuery(queryStr);
|
||||
const result = await docSearch.execQuery(parsedQuery, filterCrate, currentCrate);
|
||||
const resultsTable = {};
|
||||
for (const tab in result) {
|
||||
if (!Object.prototype.hasOwnProperty.call(result, tab)) {
|
||||
continue;
|
||||
}
|
||||
if (!(result[tab] instanceof Array)) {
|
||||
if (!isGeneratorObject(result[tab])) {
|
||||
continue;
|
||||
}
|
||||
for (const entry of result[tab]) {
|
||||
resultsTable[tab] = [];
|
||||
for await (const entry of result[tab]) {
|
||||
const flatEntry = Object.assign({
|
||||
crate: entry.item.crate,
|
||||
name: entry.item.name,
|
||||
path: entry.item.modulePath,
|
||||
exactPath: entry.item.exactModulePath,
|
||||
ty: entry.item.ty,
|
||||
}, entry);
|
||||
for (const key in entry) {
|
||||
if (!Object.prototype.hasOwnProperty.call(entry, key)) {
|
||||
continue;
|
||||
}
|
||||
if (key === "displayTypeSignature" && entry.displayTypeSignature !== null) {
|
||||
const {type, mappedNames, whereClause} =
|
||||
await entry.displayTypeSignature;
|
||||
entry.displayType = arrayToCode(type);
|
||||
entry.displayMappedNames = [...mappedNames.entries()]
|
||||
if (key === "desc" && entry.desc !== null) {
|
||||
flatEntry.desc = await entry.desc;
|
||||
} else if (key === "displayTypeSignature" &&
|
||||
entry.displayTypeSignature !== null
|
||||
) {
|
||||
flatEntry.displayTypeSignature = await entry.displayTypeSignature;
|
||||
const {
|
||||
type,
|
||||
mappedNames,
|
||||
whereClause,
|
||||
} = flatEntry.displayTypeSignature;
|
||||
flatEntry.displayType = arrayToCode(type);
|
||||
flatEntry.displayMappedNames = [...mappedNames.entries()]
|
||||
.map(([name, qname]) => {
|
||||
return `${name} = ${qname}`;
|
||||
}).join(", ");
|
||||
entry.displayWhereClause = [...whereClause.entries()]
|
||||
flatEntry.displayWhereClause = [...whereClause.entries()]
|
||||
.flatMap(([name, value]) => {
|
||||
if (value.length === 0) {
|
||||
return [];
|
||||
@@ -489,16 +503,12 @@ function loadSearchJS(doc_folder, resource_suffix) {
|
||||
}).join(", ");
|
||||
}
|
||||
}
|
||||
resultsTable[tab].push(flatEntry);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
return { resultsTable, parsedQuery };
|
||||
},
|
||||
getCorrections: function(queryStr, filterCrate, currentCrate) {
|
||||
const parsedQuery = searchModule.parseQuery(queryStr);
|
||||
docSearch.execQuery(parsedQuery, filterCrate, currentCrate);
|
||||
return parsedQuery.correction;
|
||||
},
|
||||
parseQuery: searchModule.parseQuery,
|
||||
parseQuery: DocSearch.parseQuery,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -570,7 +580,7 @@ async function main(argv) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
const parseAndSearch = loadSearchJS(
|
||||
const parseAndSearch = await loadSearchJS(
|
||||
opts["doc_folder"],
|
||||
opts["resource_suffix"],
|
||||
);
|
||||
@@ -579,14 +589,11 @@ async function main(argv) {
|
||||
const doSearch = function(queryStr, filterCrate) {
|
||||
return parseAndSearch.doSearch(queryStr, filterCrate, opts["crate_name"]);
|
||||
};
|
||||
const getCorrections = function(queryStr, filterCrate) {
|
||||
return parseAndSearch.getCorrections(queryStr, filterCrate, opts["crate_name"]);
|
||||
};
|
||||
|
||||
if (opts["test_file"].length !== 0) {
|
||||
for (const file of opts["test_file"]) {
|
||||
process.stdout.write(`Testing ${file} ... `);
|
||||
errors += await runChecks(file, doSearch, parseAndSearch.parseQuery, getCorrections);
|
||||
errors += await runChecks(file, doSearch, parseAndSearch.parseQuery);
|
||||
}
|
||||
} else if (opts["test_folder"].length !== 0) {
|
||||
for (const file of fs.readdirSync(opts["test_folder"])) {
|
||||
@@ -595,7 +602,7 @@ async function main(argv) {
|
||||
}
|
||||
process.stdout.write(`Testing ${file} ... `);
|
||||
errors += await runChecks(path.join(opts["test_folder"], file), doSearch,
|
||||
parseAndSearch.parseQuery, getCorrections);
|
||||
parseAndSearch.parseQuery);
|
||||
}
|
||||
}
|
||||
return errors > 0 ? 1 : 0;
|
||||
|
||||
@@ -19,7 +19,7 @@ fn main() {
|
||||
.args(&["--extend-css", "z.css"])
|
||||
.input("x.rs")
|
||||
.run();
|
||||
assert!(path("invocation-only/search-index-xxx.js").exists());
|
||||
assert!(path("invocation-only/search.index/root-xxx.js").exists());
|
||||
assert!(path("invocation-only/crates-xxx.js").exists());
|
||||
assert!(path("invocation-only/settings.html").exists());
|
||||
assert!(path("invocation-only/x/all.html").exists());
|
||||
|
||||
@@ -15,7 +15,7 @@ fn main() {
|
||||
rustdoc().input("foo.rs").out_dir(&bar_first).run();
|
||||
|
||||
diff()
|
||||
.expected_file(foo_first.join("search-index.js"))
|
||||
.actual_file(bar_first.join("search-index.js"))
|
||||
.expected_file(foo_first.join("search.index/root.js"))
|
||||
.actual_file(bar_first.join("search.index/root.js"))
|
||||
.run();
|
||||
}
|
||||
|
||||
@@ -5,25 +5,25 @@ include: "utils.goml"
|
||||
// First we check we "hover".
|
||||
move-cursor-to: ".example-wrap"
|
||||
assert-css: (".example-wrap .copy-button", { "visibility": "visible" })
|
||||
move-cursor-to: ".search-input"
|
||||
move-cursor-to: "#search-button"
|
||||
assert-css: (".example-wrap .copy-button", { "visibility": "hidden" })
|
||||
|
||||
// Now we check the click.
|
||||
assert-count: (".example-wrap:not(:hover) .button-holder.keep-visible", 0)
|
||||
click: ".example-wrap"
|
||||
move-cursor-to: ".search-input"
|
||||
move-cursor-to: "#search-button"
|
||||
// It should have a new class and be visible.
|
||||
wait-for-count: (".example-wrap:not(:hover) .button-holder.keep-visible", 1)
|
||||
wait-for-css: (".example-wrap:not(:hover) .button-holder.keep-visible", { "visibility": "visible" })
|
||||
// Clicking again will remove the class.
|
||||
click: ".example-wrap"
|
||||
move-cursor-to: ".search-input"
|
||||
move-cursor-to: "rustdoc-toolbar #search-button"
|
||||
assert-count: (".example-wrap:not(:hover) .button-holder.keep-visible", 0)
|
||||
assert-css: (".example-wrap .copy-button", { "visibility": "hidden" })
|
||||
|
||||
// Clicking on the "copy code" button shouldn't make the buttons stick.
|
||||
click: ".example-wrap .copy-button"
|
||||
move-cursor-to: ".search-input"
|
||||
move-cursor-to: "#search-button"
|
||||
assert-count: (".example-wrap:not(:hover) .button-holder.keep-visible", 0)
|
||||
assert-css: (".example-wrap .copy-button", { "visibility": "hidden" })
|
||||
// Since we clicked on the copy button, the clipboard content should have been updated.
|
||||
|
||||
@@ -12,7 +12,7 @@ define-function: (
|
||||
assert-count: (".example-wrap .copy-button", 1)
|
||||
// We now ensure it's only displayed when the example is hovered.
|
||||
assert-css: (".example-wrap .copy-button", { "visibility": "visible" })
|
||||
move-cursor-to: ".search-input"
|
||||
move-cursor-to: "rustdoc-toolbar #search-button"
|
||||
assert-css: (".example-wrap .copy-button", { "visibility": "hidden" })
|
||||
// Checking that the copy button has the same size as the "copy path" button.
|
||||
compare-elements-size: (
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
// This test ensures that several clickable items actually have the pointer cursor.
|
||||
include: "utils.goml"
|
||||
go-to: "file://" + |DOC_PATH| + "/lib2/struct.Foo.html"
|
||||
|
||||
// the `[+]/[-]` button
|
||||
@@ -8,11 +9,7 @@ assert-css: ("#toggle-all-docs", {"cursor": "pointer"})
|
||||
assert-css: ("#copy-path", {"cursor": "pointer"})
|
||||
|
||||
// the search tabs
|
||||
write-into: (".search-input", "Foo")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "Foo"})
|
||||
assert-css: ("#search-tabs > button", {"cursor": "pointer"})
|
||||
|
||||
// mobile sidebar toggle button
|
||||
|
||||
@@ -69,7 +69,7 @@ call-function: ("check-colors", {
|
||||
// and make sure it goes away.
|
||||
|
||||
// First, open the settings menu.
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-css: ("#settings", {"display": "block"})
|
||||
|
||||
@@ -121,7 +121,7 @@ call-function: ("check-padding", {
|
||||
define-function: ("check-line-numbers-existence", [], block {
|
||||
assert-local-storage: {"rustdoc-line-numbers": "true" }
|
||||
assert-false: ".example-line-numbers"
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
|
||||
// Then, click the toggle button.
|
||||
@@ -137,7 +137,7 @@ define-function: ("check-line-numbers-existence", [], block {
|
||||
// Line numbers should still be there.
|
||||
assert-css: ("[data-nosnippet]", { "display": "block"})
|
||||
// Closing settings menu.
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for-css: ("#settings", {"display": "none"})
|
||||
})
|
||||
|
||||
@@ -168,7 +168,7 @@ assert: ".example-wrap > pre.rust"
|
||||
assert-count: (".example-wrap", 2)
|
||||
assert-count: (".example-wrap.digits-1", 2)
|
||||
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
|
||||
// Then, click the toggle button.
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
// This test ensures that the "Escape" shortcut is handled correctly based on the
|
||||
// current content displayed.
|
||||
include: "utils.goml"
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
// First, we check that the search results are hidden when the Escape key is pressed.
|
||||
write-into: (".search-input", "test")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
wait-for: "#search h1" // The search element is empty before the first search
|
||||
call-function: ("perform-search", {"query": "test"})
|
||||
// Check that the currently displayed element is search.
|
||||
wait-for: "#alternative-display #search"
|
||||
assert-attribute: ("#main-content", {"class": "content hidden"})
|
||||
assert-document-property: ({"URL": "index.html?search=test"}, ENDS_WITH)
|
||||
press-key: "Escape"
|
||||
@@ -17,8 +14,8 @@ assert-false: "#alternative-display #search"
|
||||
assert-attribute: ("#main-content", {"class": "content"})
|
||||
assert-document-property: ({"URL": "index.html"}, [ENDS_WITH])
|
||||
|
||||
// Check that focusing the search input brings back the search results
|
||||
focus: ".search-input"
|
||||
// Check that clicking the search button brings back the search results
|
||||
click: "#search-button"
|
||||
wait-for: "#alternative-display #search"
|
||||
assert-attribute: ("#main-content", {"class": "content hidden"})
|
||||
assert-document-property: ({"URL": "index.html?search=test"}, ENDS_WITH)
|
||||
|
||||
@@ -8,7 +8,7 @@ assert-css: ("body", {"font-family": |serif_font|})
|
||||
assert-css: ("p code", {"font-family": |serif_code_font|})
|
||||
|
||||
// We now switch to the sans serif font
|
||||
click: "#settings-menu"
|
||||
click: "main .settings-menu"
|
||||
wait-for: "#sans-serif-fonts"
|
||||
click: "#sans-serif-fonts"
|
||||
|
||||
@@ -23,7 +23,7 @@ assert-css: ("body", {"font-family": |font|})
|
||||
assert-css: ("p code", {"font-family": |code_font|})
|
||||
|
||||
// We switch back to the serif font
|
||||
click: "#settings-menu"
|
||||
click: "main .settings-menu"
|
||||
wait-for: "#sans-serif-fonts"
|
||||
click: "#sans-serif-fonts"
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
// Make sure search stores its data in `window`
|
||||
// It needs to use a global to avoid racing on search-index.js and search.js
|
||||
// https://github.com/rust-lang/rust/pull/118961
|
||||
include: "utils.goml"
|
||||
|
||||
// URL query
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=sa'%3Bda'%3Bds"
|
||||
@@ -9,9 +10,7 @@ assert-window-property-false: {"searchIndex": null}
|
||||
|
||||
// Form input
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
write-into: (".search-input", "Foo")
|
||||
press-key: 'Enter'
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "Foo"})
|
||||
assert-window-property-false: {"searchIndex": null}
|
||||
|
||||
// source sidebar
|
||||
|
||||
@@ -6,12 +6,12 @@ assert-css: ("#help", {"display": "block"})
|
||||
assert-css: ("#help dd", {"font-size": "16px"})
|
||||
assert-false: "#help-button > a"
|
||||
assert-css: ("#help", {"display": "block"})
|
||||
compare-elements-property: (".sub", "#help", ["offsetWidth"])
|
||||
compare-elements-position: (".sub", "#help", ["x"])
|
||||
compare-elements-property: (".main-heading", "#help", ["offsetWidth"])
|
||||
compare-elements-position: (".main-heading", "#help", ["x"])
|
||||
set-window-size: (500, 1000) // Try mobile next.
|
||||
assert-css: ("#help", {"display": "block"})
|
||||
compare-elements-property: (".sub", "#help", ["offsetWidth"])
|
||||
compare-elements-position: (".sub", "#help", ["x"])
|
||||
compare-elements-property: (".main-heading", "#help", ["offsetWidth"])
|
||||
compare-elements-position: (".main-heading", "#help", ["x"])
|
||||
|
||||
// Checking the color of the elements of the help menu.
|
||||
show-text: true
|
||||
@@ -54,19 +54,17 @@ go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=a"
|
||||
wait-for: "#search-tabs" // Waiting for the search.js to load.
|
||||
set-window-size: (1000, 1000) // Only supported on desktop.
|
||||
assert-false: "#help"
|
||||
click: "#help-button > a"
|
||||
click: "rustdoc-toolbar .help-menu > a"
|
||||
assert-css: ("#help", {"display": "block"})
|
||||
assert-css: ("#help dd", {"font-size": "16px"})
|
||||
click: "#help-button > a"
|
||||
assert-css: ("#help", {"display": "none"})
|
||||
compare-elements-property-false: (".sub", "#help", ["offsetWidth"])
|
||||
compare-elements-position-false: (".sub", "#help", ["x"])
|
||||
click: "rustdoc-toolbar .help-menu > a"
|
||||
assert-false: "#help"
|
||||
|
||||
// This test ensures that the "the rustdoc book" anchor link within the help popover works.
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=a"
|
||||
wait-for: "#search-tabs" // Waiting for the search.js to load.
|
||||
set-window-size: (1000, 1000) // Popover only appears when the screen width is >700px.
|
||||
assert-false: "#help"
|
||||
click: "#help-button > a"
|
||||
click: "rustdoc-toolbar .help-menu > a"
|
||||
click: "//*[@id='help']//a[text()='the rustdoc book']"
|
||||
wait-for-document-property: ({"URL": "https://doc.rust-lang.org/"}, STARTS_WITH)
|
||||
|
||||
@@ -1,20 +1,19 @@
|
||||
// Checks sidebar resizing stays synced with the setting
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
go-to: "file://" + |DOC_PATH| + "/settings.html"
|
||||
set-window-size: (400, 600)
|
||||
|
||||
// Verify that the "hide" option is unchecked
|
||||
click: "#settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-css: ("#settings", {"display": "block"})
|
||||
assert-property: ("#hide-sidebar", {"checked": "false"})
|
||||
assert-css: (".mobile-topbar", {"display": "flex"})
|
||||
assert-css: ("rustdoc-topbar", {"display": "flex"})
|
||||
|
||||
// Toggle it
|
||||
click: "#hide-sidebar"
|
||||
assert-property: ("#hide-sidebar", {"checked": "true"})
|
||||
assert-css: (".mobile-topbar", {"display": "none"})
|
||||
assert-css: ("rustdoc-topbar", {"display": "none"})
|
||||
|
||||
// Toggle it again
|
||||
click: "#hide-sidebar"
|
||||
assert-property: ("#hide-sidebar", {"checked": "false"})
|
||||
assert-css: (".mobile-topbar", {"display": "flex"})
|
||||
assert-css: ("rustdoc-topbar", {"display": "flex"})
|
||||
|
||||
@@ -8,8 +8,3 @@ assert-property: (".sidebar-crate .logo-container", {"offsetWidth": "96", "offse
|
||||
// offsetWidth = width of sidebar, offsetHeight = height + top padding
|
||||
assert-property: (".sidebar-crate .logo-container img", {"offsetWidth": "48", "offsetHeight": 64})
|
||||
assert-css: (".sidebar-crate .logo-container img", {"border-top-width": "16px", "margin-top": "-16px"})
|
||||
|
||||
set-window-size: (400, 600)
|
||||
// offset = size + margin
|
||||
assert-property: (".mobile-topbar .logo-container", {"offsetWidth": "55", "offsetHeight": 45})
|
||||
assert-property: (".mobile-topbar .logo-container img", {"offsetWidth": "35", "offsetHeight": 35})
|
||||
|
||||
@@ -20,7 +20,7 @@ store-position: (
|
||||
{"x": second_line_x, "y": second_line_y},
|
||||
)
|
||||
assert: |first_line_x| != |second_line_x| && |first_line_x| == 521 && |second_line_x| == 277
|
||||
assert: |first_line_y| != |second_line_y| && |first_line_y| == 718 && |second_line_y| == 741
|
||||
assert: |first_line_y| != |second_line_y| && |first_line_y| == 676 && |second_line_y| == 699
|
||||
|
||||
// Now we ensure that they're not rendered on the same line.
|
||||
set-window-size: (1100, 800)
|
||||
|
||||
@@ -5,18 +5,18 @@ go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
// First we change the title to make it big.
|
||||
set-window-size: (350, 800)
|
||||
// We ensure that the "format" of the title is the same as the one we'll use.
|
||||
assert-text: (".mobile-topbar .location a", "test_docs")
|
||||
assert-text: ("rustdoc-topbar h2 a", "Crate test_docs")
|
||||
// We store the height we know is correct.
|
||||
store-property: (".mobile-topbar .location", {"offsetHeight": height})
|
||||
store-property: ("rustdoc-topbar h2", {"offsetHeight": height})
|
||||
// We change the crate name to something longer.
|
||||
set-text: (".mobile-topbar .location a", "cargo_packager_resource_resolver")
|
||||
set-text: ("rustdoc-topbar h2 a", "cargo_packager_resource_resolver")
|
||||
// And we check that the size remained the same.
|
||||
assert-property: (".mobile-topbar .location", {"offsetHeight": |height|})
|
||||
assert-property: ("rustdoc-topbar h2", {"offsetHeight": |height|})
|
||||
|
||||
// Now we check if it works for the non-crate pages as well.
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/struct.Foo.html"
|
||||
// We store the height we know is correct.
|
||||
store-property: (".mobile-topbar .location", {"offsetHeight": height})
|
||||
set-text: (".mobile-topbar .location a", "Something_incredibly_long_because")
|
||||
store-property: ("rustdoc-topbar h2", {"offsetHeight": height})
|
||||
set-text: ("rustdoc-topbar h2 a", "Something_incredibly_long_because")
|
||||
// And we check that the size remained the same.
|
||||
assert-property: (".mobile-topbar .location", {"offsetHeight": |height|})
|
||||
assert-property: ("rustdoc-topbar h2", {"offsetHeight": |height|})
|
||||
|
||||
@@ -5,7 +5,7 @@ set-window-size: (400, 600)
|
||||
set-font-size: 18
|
||||
wait-for: 100 // wait a bit for the resize and the font-size change to be fully taken into account.
|
||||
|
||||
assert-property: (".mobile-topbar h2", {"offsetHeight": 33})
|
||||
assert-property: ("rustdoc-topbar h2", {"offsetHeight": 33})
|
||||
|
||||
// On the settings page, the theme buttons should not line-wrap. Instead, they should
|
||||
// all be placed as a group on a line below the setting name "Theme."
|
||||
|
||||
@@ -82,15 +82,6 @@ call-function: ("check-notable-tooltip-position", {
|
||||
"i_x": 528,
|
||||
})
|
||||
|
||||
// Checking on mobile now.
|
||||
set-window-size: (650, 600)
|
||||
wait-for-size: ("body", {"width": 650})
|
||||
call-function: ("check-notable-tooltip-position-complete", {
|
||||
"x": 26,
|
||||
"i_x": 305,
|
||||
"popover_x": 0,
|
||||
})
|
||||
|
||||
// Now check the colors.
|
||||
define-function: (
|
||||
"check-colors",
|
||||
@@ -176,6 +167,15 @@ call-function: (
|
||||
},
|
||||
)
|
||||
|
||||
// Checking on mobile now.
|
||||
set-window-size: (650, 600)
|
||||
wait-for-size: ("body", {"width": 650})
|
||||
call-function: ("check-notable-tooltip-position-complete", {
|
||||
"x": 26,
|
||||
"i_x": 305,
|
||||
"popover_x": 0,
|
||||
})
|
||||
|
||||
reload:
|
||||
|
||||
// Check that pressing escape works
|
||||
@@ -189,7 +189,7 @@ assert: "#method\.create_an_iterator_from_read .tooltip:focus"
|
||||
// Check that clicking outside works.
|
||||
click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']"
|
||||
assert-count: ("//*[@class='tooltip popover']", 1)
|
||||
click: ".search-input"
|
||||
click: ".main-heading h1"
|
||||
assert-count: ("//*[@class='tooltip popover']", 0)
|
||||
assert-false: "#method\.create_an_iterator_from_read .tooltip:focus"
|
||||
|
||||
@@ -219,14 +219,14 @@ define-function: (
|
||||
store-window-property: {"scrollY": scroll}
|
||||
click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']"
|
||||
wait-for: "//*[@class='tooltip popover']"
|
||||
click: "#settings-menu a"
|
||||
click: ".main-heading h1"
|
||||
}
|
||||
)
|
||||
|
||||
// Now we check that the focus isn't given back to the wrong item when opening
|
||||
// another popover.
|
||||
call-function: ("setup-popup", {})
|
||||
click: ".search-input"
|
||||
click: ".main-heading h1"
|
||||
// We ensure we didn't come back to the previous focused item.
|
||||
assert-window-property-false: {"scrollY": |scroll|}
|
||||
|
||||
@@ -251,7 +251,7 @@ reload:
|
||||
assert-count: ("//*[@class='tooltip popover']", 0)
|
||||
click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']"
|
||||
assert-count: ("//*[@class='tooltip popover']", 1)
|
||||
click: "#settings-menu a"
|
||||
click: "rustdoc-toolbar .settings-menu a"
|
||||
wait-for: "#settings"
|
||||
assert-count: ("//*[@class='tooltip popover']", 0)
|
||||
assert-false: "#method\.create_an_iterator_from_read .tooltip:focus"
|
||||
|
||||
@@ -3,33 +3,33 @@ include: "utils.goml"
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=test"
|
||||
wait-for: "#crate-search"
|
||||
// First we check that the help menu doesn't exist yet.
|
||||
assert-false: "#help-button .popover"
|
||||
assert-false: "rustdoc-toolbar .help-menu .popover"
|
||||
// Then we display the help menu.
|
||||
click: "#help-button"
|
||||
assert: "#help-button .popover"
|
||||
assert-css: ("#help-button .popover", {"display": "block"})
|
||||
click: "rustdoc-toolbar .help-menu"
|
||||
assert: "rustdoc-toolbar .help-menu .popover"
|
||||
assert-css: ("rustdoc-toolbar .help-menu .popover", {"display": "block"})
|
||||
|
||||
// Now we click somewhere else on the page to ensure it is handling the blur event
|
||||
// correctly.
|
||||
click: ".sidebar"
|
||||
assert-css: ("#help-button .popover", {"display": "none"})
|
||||
assert-false: "rustdoc-toolbar .help-menu .popover"
|
||||
|
||||
// Now we will check that we cannot have two "pocket menus" displayed at the same time.
|
||||
click: "#help-button"
|
||||
assert-css: ("#help-button .popover", {"display": "block"})
|
||||
click: "#settings-menu"
|
||||
assert-css: ("#help-button .popover", {"display": "none"})
|
||||
assert-css: ("#settings-menu .popover", {"display": "block"})
|
||||
click: "rustdoc-toolbar .help-menu"
|
||||
assert-css: ("rustdoc-toolbar .help-menu .popover", {"display": "block"})
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
assert-false: "rustdoc-toolbar .help-menu .popover"
|
||||
assert-css: ("rustdoc-toolbar .settings-menu .popover", {"display": "block"})
|
||||
|
||||
// Now the other way.
|
||||
click: "#help-button"
|
||||
assert-css: ("#help-button .popover", {"display": "block"})
|
||||
assert-css: ("#settings-menu .popover", {"display": "none"})
|
||||
click: "rustdoc-toolbar .help-menu"
|
||||
assert-css: ("rustdoc-toolbar .help-menu .popover", {"display": "block"})
|
||||
assert-css: ("rustdoc-toolbar .settings-menu .popover", {"display": "none"})
|
||||
|
||||
// Now verify that clicking the help menu again closes it.
|
||||
click: "#help-button"
|
||||
assert-css: ("#help-button .popover", {"display": "none"})
|
||||
assert-css: ("#settings-menu .popover", {"display": "none"})
|
||||
click: "rustdoc-toolbar .help-menu"
|
||||
assert-false: "rustdoc-toolbar .help-menu .popover"
|
||||
assert-css: (".settings-menu .popover", {"display": "none"})
|
||||
|
||||
define-function: (
|
||||
"check-popover-colors",
|
||||
@@ -37,13 +37,21 @@ define-function: (
|
||||
block {
|
||||
call-function: ("switch-theme", {"theme": |theme|})
|
||||
|
||||
click: "#help-button"
|
||||
click: "rustdoc-toolbar .help-menu"
|
||||
assert-css: (
|
||||
"#help-button .popover",
|
||||
"rustdoc-toolbar .help-menu .popover",
|
||||
{"display": "block", "border-color": |border_color|},
|
||||
)
|
||||
compare-elements-css: ("#help-button .popover", "#help-button .top", ["border-color"])
|
||||
compare-elements-css: ("#help-button .popover", "#help-button .bottom", ["border-color"])
|
||||
compare-elements-css: (
|
||||
"rustdoc-toolbar .help-menu .popover",
|
||||
"rustdoc-toolbar .help-menu .top",
|
||||
["border-color"],
|
||||
)
|
||||
compare-elements-css: (
|
||||
"rustdoc-toolbar .help-menu .popover",
|
||||
"rustdoc-toolbar .help-menu .bottom",
|
||||
["border-color"],
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
@@ -63,8 +71,21 @@ call-function: ("check-popover-colors", {
|
||||
|
||||
// Opening the mobile sidebar should close the settings popover.
|
||||
set-window-size: (650, 600)
|
||||
click: "#settings-menu a"
|
||||
assert-css: ("#settings-menu .popover", {"display": "block"})
|
||||
click: "rustdoc-topbar .settings-menu a"
|
||||
assert-css: ("rustdoc-topbar .settings-menu .popover", {"display": "block"})
|
||||
click: ".sidebar-menu-toggle"
|
||||
assert: "//*[@class='sidebar shown']"
|
||||
assert-css: ("#settings-menu .popover", {"display": "none"})
|
||||
assert-css: ("rustdoc-topbar .settings-menu .popover", {"display": "none"})
|
||||
// Opening the settings popover should close the sidebar.
|
||||
click: ".settings-menu a"
|
||||
assert-css: ("rustdoc-topbar .settings-menu .popover", {"display": "block"})
|
||||
assert-false: "//*[@class='sidebar shown']"
|
||||
|
||||
// Opening the settings popover at start (which async loads stuff) should also close.
|
||||
reload:
|
||||
click: ".sidebar-menu-toggle"
|
||||
assert: "//*[@class='sidebar shown']"
|
||||
assert-false: "rustdoc-topbar .settings-menu .popover"
|
||||
click: "rustdoc-topbar .settings-menu a"
|
||||
assert-false: "//*[@class='sidebar shown']"
|
||||
wait-for: "rustdoc-topbar .settings-menu .popover"
|
||||
|
||||
@@ -27,7 +27,7 @@ define-function: (
|
||||
"color": |help_hover_color|,
|
||||
})
|
||||
// Moving the cursor to another item to not break next runs.
|
||||
move-cursor-to: ".search-input"
|
||||
move-cursor-to: "#search-button"
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -64,8 +64,8 @@ assert-size: (".more-scraped-examples .scraped-example .example-wrap", {
|
||||
store-value: (offset_y, 4)
|
||||
|
||||
// First with desktop
|
||||
assert-position: (".scraped-example", {"y": 256})
|
||||
assert-position: (".scraped-example .prev", {"y": 256 + |offset_y|})
|
||||
assert-position: (".scraped-example", {"y": 214})
|
||||
assert-position: (".scraped-example .prev", {"y": 214 + |offset_y|})
|
||||
|
||||
// Gradient background should be at the top of the code block.
|
||||
assert-css: (".scraped-example .example-wrap::before", {"top": "0px"})
|
||||
@@ -74,8 +74,8 @@ assert-css: (".scraped-example .example-wrap::after", {"bottom": "0px"})
|
||||
// Then with mobile
|
||||
set-window-size: (600, 600)
|
||||
store-size: (".scraped-example .scraped-example-title", {"height": title_height})
|
||||
assert-position: (".scraped-example", {"y": 291})
|
||||
assert-position: (".scraped-example .prev", {"y": 291 + |offset_y| + |title_height|})
|
||||
assert-position: (".scraped-example", {"y": 249})
|
||||
assert-position: (".scraped-example .prev", {"y": 249 + |offset_y| + |title_height|})
|
||||
|
||||
define-function: (
|
||||
"check_title_and_code_position",
|
||||
|
||||
@@ -25,7 +25,7 @@ define-function: (
|
||||
// We put the toggle in the original state.
|
||||
click: ".more-examples-toggle"
|
||||
// Moving cursor away from the toggle line to prevent disrupting next test.
|
||||
move-cursor-to: ".search-input"
|
||||
move-cursor-to: "rustdoc-toolbar #search-button"
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ focus: ".search-input"
|
||||
press-key: "Enter"
|
||||
|
||||
wait-for: "#search-tabs"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-count: ("#search-tabs button", 1)
|
||||
assert-count: (".search-results > a", 1)
|
||||
|
||||
@@ -32,6 +33,7 @@ focus: ".search-input"
|
||||
press-key: "Enter"
|
||||
|
||||
wait-for: "#search-tabs"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-text: ("//div[@class='type-signature']", "F -> WhereWhitespace<T>")
|
||||
assert-count: ("#search-tabs button", 1)
|
||||
assert-count: (".search-results > a", 1)
|
||||
|
||||
@@ -1,101 +1,60 @@
|
||||
// ignore-tidy-linelength
|
||||
include: "utils.goml"
|
||||
|
||||
// Checks that the search tab result tell the user about corrections
|
||||
// First, try a search-by-name
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
// Intentionally wrong spelling of "NotableStructWithLongName"
|
||||
write-into: (".search-input", "NotableStructWithLongNamr")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "NotableStructWithLongNamr"})
|
||||
|
||||
// Corrections aren't shown on the "In Names" tab.
|
||||
assert: "#search-tabs button.selected:first-child"
|
||||
assert-css: (".search-corrections", {
|
||||
"display": "none"
|
||||
})
|
||||
assert-false: ".search-results:nth-child(1) .search-corrections"
|
||||
|
||||
// Corrections do get shown on the "In Parameters" tab.
|
||||
click: "#search-tabs button:nth-child(2)"
|
||||
assert: "#search-tabs button.selected:nth-child(2)"
|
||||
assert-css: (".search-corrections", {
|
||||
"display": "block"
|
||||
})
|
||||
assert-text: (
|
||||
".search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found. Showing results for closest type name \"notablestructwithlongname\" instead."
|
||||
".search-results:nth-child(2) .search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found. Showing results for closest type name \"NotableStructWithLongName\" instead."
|
||||
)
|
||||
|
||||
// Corrections do get shown on the "In Return Type" tab.
|
||||
click: "#search-tabs button:nth-child(3)"
|
||||
assert: "#search-tabs button.selected:nth-child(3)"
|
||||
assert-css: (".search-corrections", {
|
||||
"display": "block"
|
||||
})
|
||||
assert-text: (
|
||||
".search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found. Showing results for closest type name \"notablestructwithlongname\" instead."
|
||||
".search-results:nth-child(3) .search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found. Showing results for closest type name \"NotableStructWithLongName\" instead."
|
||||
)
|
||||
|
||||
// Now, explicit return values
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
// Intentionally wrong spelling of "NotableStructWithLongName"
|
||||
write-into: (".search-input", "-> NotableStructWithLongNamr")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "-> NotableStructWithLongNamr"})
|
||||
|
||||
assert-css: (".search-corrections", {
|
||||
"display": "block"
|
||||
})
|
||||
assert-text: (
|
||||
".search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found. Showing results for closest type name \"notablestructwithlongname\" instead."
|
||||
".search-results.active .search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found and used as generic parameter. Consider searching for \"NotableStructWithLongName\" instead."
|
||||
)
|
||||
|
||||
// Now, generic correction
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
// Intentionally wrong spelling of "NotableStructWithLongName"
|
||||
write-into: (".search-input", "NotableStructWithLongNamr, NotableStructWithLongNamr")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "NotableStructWithLongNamr, NotableStructWithLongNamr"})
|
||||
|
||||
assert-css: (".search-corrections", {
|
||||
"display": "block"
|
||||
})
|
||||
assert-text: (
|
||||
".search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found and used as generic parameter. Consider searching for \"notablestructwithlongname\" instead."
|
||||
".search-failed.active .search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found and used as generic parameter. Consider searching for \"NotableStructWithLongName\" instead."
|
||||
)
|
||||
|
||||
// Now, generic correction plus error
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
// Intentionally wrong spelling of "NotableStructWithLongName"
|
||||
write-into: (".search-input", "Foo<NotableStructWithLongNamr>,y")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "Foo<NotableStructWithLongNamr>,y"})
|
||||
|
||||
assert-css: (".search-corrections", {
|
||||
"display": "block"
|
||||
})
|
||||
assert-text: (
|
||||
".search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found and used as generic parameter. Consider searching for \"notablestructwithlongname\" instead."
|
||||
".search-failed.active .search-corrections",
|
||||
"Type \"NotableStructWithLongNamr\" not found and used as generic parameter. Consider searching for \"NotableStructWithLongName\" instead."
|
||||
)
|
||||
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
// Intentionally wrong spelling of "NotableStructWithLongName"
|
||||
write-into: (".search-input", "generic:NotableStructWithLongNamr<x>,y")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "generic:NotableStructWithLongNamr<x>,y"})
|
||||
|
||||
assert-css: (".error", {
|
||||
"display": "block"
|
||||
|
||||
@@ -8,6 +8,7 @@ define-function: (
|
||||
[theme, error_background],
|
||||
block {
|
||||
call-function: ("switch-theme", {"theme": |theme|})
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
wait-for: "#search .error code"
|
||||
assert-css: ("#search .error code", {"background-color": |error_background|})
|
||||
}
|
||||
|
||||
@@ -2,11 +2,7 @@
|
||||
include: "utils.goml"
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
show-text: true
|
||||
write-into: (".search-input", "test")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "test"})
|
||||
assert-text: ("#results .externcrate", "test_docs")
|
||||
|
||||
wait-for: "#crate-search"
|
||||
@@ -21,6 +17,7 @@ press-key: "ArrowDown"
|
||||
press-key: "Enter"
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-document-property: ({"URL": "&filter-crate="}, CONTAINS)
|
||||
// We check that there is no more "test_docs" appearing.
|
||||
assert-false: "#results .externcrate"
|
||||
@@ -31,7 +28,8 @@ assert-property: ("#crate-search", {"value": "lib2"})
|
||||
// crate filtering.
|
||||
press-key: "Escape"
|
||||
wait-for-css: ("#main-content", {"display": "block"})
|
||||
focus: ".search-input"
|
||||
click: "#search-button"
|
||||
wait-for: ".search-input"
|
||||
wait-for-css: ("#main-content", {"display": "none"})
|
||||
// We check that there is no more "test_docs" appearing.
|
||||
assert-false: "#results .externcrate"
|
||||
@@ -47,6 +45,7 @@ press-key: "ArrowUp"
|
||||
press-key: "Enter"
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-property: ("#crate-search", {"value": "all crates"})
|
||||
|
||||
// Checking that the URL parameter is taken into account for crate filtering.
|
||||
@@ -56,8 +55,7 @@ assert-property: ("#crate-search", {"value": "lib2"})
|
||||
assert-false: "#results .externcrate"
|
||||
|
||||
// Checking that the text for the "title" is correct (the "all crates" comes from the "<select>").
|
||||
assert-text: (".search-results-title", "Results", STARTS_WITH)
|
||||
assert-text: (".search-results-title + .sub-heading", " in all crates", STARTS_WITH)
|
||||
assert-text: (".search-switcher", "Search results in all crates", STARTS_WITH)
|
||||
|
||||
// Checking the display of the crate filter.
|
||||
// We start with the light theme.
|
||||
@@ -72,7 +70,7 @@ assert-css: ("#crate-search", {
|
||||
})
|
||||
|
||||
// We now check the dark theme.
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
click: "#theme-dark"
|
||||
wait-for-css: ("#crate-search", {
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
include: "utils.goml"
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=test"
|
||||
wait-for: "#search-tabs" // Waiting for the search.js to load.
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
show-text: true
|
||||
|
||||
define-function: (
|
||||
@@ -31,7 +32,7 @@ define-function: (
|
||||
},
|
||||
)
|
||||
assert-css: (
|
||||
"#help-button > a",
|
||||
"rustdoc-toolbar .help-menu > a",
|
||||
{
|
||||
"color": |menu_button_a_color|,
|
||||
"border-color": "transparent",
|
||||
@@ -39,9 +40,9 @@ define-function: (
|
||||
},
|
||||
)
|
||||
// Hover help button.
|
||||
move-cursor-to: "#help-button"
|
||||
move-cursor-to: "rustdoc-toolbar .help-menu"
|
||||
assert-css: (
|
||||
"#help-button > a",
|
||||
"rustdoc-toolbar .help-menu > a",
|
||||
{
|
||||
"color": |menu_button_a_color|,
|
||||
"border-color": |menu_button_a_border_hover|,
|
||||
@@ -49,15 +50,15 @@ define-function: (
|
||||
},
|
||||
)
|
||||
// Link color inside
|
||||
click: "#help-button"
|
||||
click: "rustdoc-toolbar .help-menu"
|
||||
assert-css: (
|
||||
"#help a",
|
||||
"rustdoc-toolbar #help a",
|
||||
{
|
||||
"color": |menu_a_color|,
|
||||
},
|
||||
)
|
||||
assert-css: (
|
||||
"#settings-menu > a",
|
||||
"rustdoc-toolbar .settings-menu > a",
|
||||
{
|
||||
"color": |menu_button_a_color|,
|
||||
"border-color": "transparent",
|
||||
@@ -65,9 +66,9 @@ define-function: (
|
||||
},
|
||||
)
|
||||
// Hover settings menu.
|
||||
move-cursor-to: "#settings-menu"
|
||||
move-cursor-to: "rustdoc-toolbar .settings-menu"
|
||||
assert-css: (
|
||||
"#settings-menu:hover > a",
|
||||
"rustdoc-toolbar .settings-menu:hover > a",
|
||||
{
|
||||
"color": |menu_button_a_color|,
|
||||
"border-color": |menu_button_a_border_hover|,
|
||||
@@ -120,8 +121,10 @@ call-function: (
|
||||
// Check that search input correctly decodes form encoding.
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=a+b"
|
||||
wait-for: "#search-tabs" // Waiting for the search.js to load.
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-property: (".search-input", { "value": "a b" })
|
||||
// Check that literal + is not treated as space.
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=a%2Bb"
|
||||
wait-for: "#search-tabs" // Waiting for the search.js to load.
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-property: (".search-input", { "value": "a+b" })
|
||||
|
||||
@@ -2,10 +2,13 @@
|
||||
// The PR which fixed it is: https://github.com/rust-lang/rust/pull/81592
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
set-window-size: (463, 700)
|
||||
// We first check that the search input isn't already focused.
|
||||
assert-false: ("input.search-input:focus")
|
||||
click: "input.search-input"
|
||||
click: "#search-button"
|
||||
wait-for: ".search-input"
|
||||
assert: "input.search-input:focus"
|
||||
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
reload:
|
||||
set-window-size: (750, 700)
|
||||
click: "input.search-input"
|
||||
assert: ("input.search-input:focus")
|
||||
click: "#search-button"
|
||||
wait-for: ".search-input"
|
||||
assert: "input.search-input:focus"
|
||||
|
||||
@@ -1,28 +1,25 @@
|
||||
// Checks that the search tab results work correctly with function signature syntax
|
||||
// First, try a search-by-name
|
||||
include: "utils.goml"
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
write-into: (".search-input", "Foo")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "Foo"})
|
||||
|
||||
// Now use the keyboard commands to switch to the third result.
|
||||
press-key: "ArrowDown"
|
||||
press-key: "ArrowDown"
|
||||
press-key: "ArrowDown"
|
||||
assert: ".search-results.active > a:focus:nth-of-type(3)"
|
||||
wait-for: ".search-results.active > a:focus:nth-of-type(3)"
|
||||
|
||||
// Now switch to the second tab, then back to the first one, then arrow back up.
|
||||
press-key: "ArrowRight"
|
||||
assert: ".search-results.active:nth-of-type(2) > a:focus:nth-of-type(1)"
|
||||
wait-for: ".search-results.active:nth-of-type(2) > a:focus:nth-of-type(1)"
|
||||
press-key: "ArrowLeft"
|
||||
assert: ".search-results.active:nth-of-type(1) > a:focus:nth-of-type(3)"
|
||||
wait-for: ".search-results.active:nth-of-type(1) > a:focus:nth-of-type(3)"
|
||||
press-key: "ArrowUp"
|
||||
assert: ".search-results.active > a:focus:nth-of-type(2)"
|
||||
wait-for: ".search-results.active > a:focus:nth-of-type(2)"
|
||||
press-key: "ArrowUp"
|
||||
assert: ".search-results.active > a:focus:nth-of-type(1)"
|
||||
wait-for: ".search-results.active > a:focus:nth-of-type(1)"
|
||||
press-key: "ArrowUp"
|
||||
assert: ".search-input:focus"
|
||||
wait-for: ".search-input:focus"
|
||||
press-key: "ArrowDown"
|
||||
assert: ".search-results.active > a:focus:nth-of-type(1)"
|
||||
wait-for: ".search-results.active > a:focus:nth-of-type(1)"
|
||||
|
||||
@@ -6,10 +6,8 @@ call-function: ("switch-theme", {"theme": "dark"})
|
||||
// First we check that the reexport has the correct ID and no background color.
|
||||
assert-text: ("//*[@id='reexport.TheStdReexport']", "pub use ::std as TheStdReexport;")
|
||||
assert-css: ("//*[@id='reexport.TheStdReexport']", {"background-color": "rgba(0, 0, 0, 0)"})
|
||||
write-into: (".search-input", "TheStdReexport")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
wait-for: "//a[@class='result-import']"
|
||||
call-function: ("perform-search", {"query": "TheStdReexport"})
|
||||
assert: "//a[@class='result-import']"
|
||||
assert-attribute: (
|
||||
"//a[@class='result-import']",
|
||||
{"href": "../test_docs/index.html#reexport.TheStdReexport"},
|
||||
@@ -21,9 +19,8 @@ wait-for-css: ("//*[@id='reexport.TheStdReexport']", {"background-color": "#494a
|
||||
|
||||
// We now check that the alias is working as well on the reexport.
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
write-into: (".search-input", "AliasForTheStdReexport")
|
||||
wait-for: "//a[@class='result-import']"
|
||||
call-function: ("perform-search", {"query": "AliasForTheStdReexport"})
|
||||
assert: "//a[@class='result-import']"
|
||||
assert-text: (
|
||||
"a.result-import .result-name",
|
||||
"re-export AliasForTheStdReexport - see test_docs::TheStdReexport",
|
||||
|
||||
@@ -14,6 +14,7 @@ define-function: (
|
||||
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-css: (
|
||||
"#search-tabs > button > .count",
|
||||
{"color": |count_color|},
|
||||
@@ -212,11 +213,7 @@ call-function: ("check-search-color", {
|
||||
// Check the alias.
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
|
||||
write-into: (".search-input", "thisisanalias")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "thisisanalias"})
|
||||
|
||||
define-function: (
|
||||
"check-alias",
|
||||
|
||||
@@ -2,4 +2,5 @@
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=some_more_function"
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-text: (".search-results .desc code", "format!")
|
||||
|
||||
@@ -7,6 +7,7 @@ write-into: (".search-input", "test")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
wait-for: "#crate-search"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
// The width is returned by "getComputedStyle" which returns the exact number instead of the
|
||||
// CSS rule which is "50%"...
|
||||
assert-size: (".search-results div.desc", {"width": 248})
|
||||
@@ -34,6 +35,7 @@ assert: |new_width| < |width| - 10
|
||||
// Check that if the search is too long on mobile, it'll go under the "typename".
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=SuperIncrediblyLongLongLongLongLongLongLongGigaGigaGigaMegaLongLongLongStructName"
|
||||
wait-for: "#crate-search"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
compare-elements-position-near: (
|
||||
".search-results .result-name .typename",
|
||||
".search-results .result-name .path",
|
||||
@@ -51,7 +53,7 @@ set-window-size: (900, 900)
|
||||
|
||||
// First we check the current width, height and position.
|
||||
assert-css: ("#crate-search", {"width": "159px"})
|
||||
store-size: (".search-results-title", {
|
||||
store-size: (".search-switcher", {
|
||||
"height": search_results_title_height,
|
||||
"width": search_results_title_width,
|
||||
})
|
||||
@@ -64,8 +66,8 @@ set-text: (
|
||||
)
|
||||
|
||||
// Then we compare again to confirm the height didn't change.
|
||||
assert-size: ("#crate-search", {"width": 370})
|
||||
assert-size: (".search-results-title", {
|
||||
assert-size: ("#crate-search", {"width": 185})
|
||||
assert-size: (".search-switcher", {
|
||||
"height": |search_results_title_height|,
|
||||
})
|
||||
assert-css: ("#search", {"width": "640px"})
|
||||
@@ -79,6 +81,7 @@ define-function: (
|
||||
block {
|
||||
call-function: ("switch-theme", {"theme": |theme|})
|
||||
wait-for: "#crate-search"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-css: ("#crate-search", {"border": "1px solid " + |border|})
|
||||
assert-css: ("#crate-search-div::after", {"filter": |filter|})
|
||||
move-cursor-to: "#crate-search"
|
||||
|
||||
@@ -9,6 +9,7 @@ assert-text-false: (".main-heading h1", "Struct test_docs::FooCopy item path")
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=struct%3AFoo"
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-text-false: (".main-heading h1", "Struct test_docs::FooCopy item path")
|
||||
// Ensure that the search results are displayed, not the "normal" content.
|
||||
assert-css: ("#main-content", {"display": "none"})
|
||||
@@ -17,4 +18,4 @@ assert-css: ("#main-content", {"display": "none"})
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html?search=struct%3AFoo&go_to_first=true"
|
||||
// Waiting for the page to load...
|
||||
wait-for-text: (".main-heading .rustdoc-breadcrumbs", "test_docs")
|
||||
wait-for-text: (".main-heading h1", "Struct FooCopy item path")
|
||||
wait-for-text: (".main-heading h1", "Struct Foo Copy item path")
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
// ignore-tidy-linelength
|
||||
include: "utils.goml"
|
||||
|
||||
// Checks that, if a type has two methods with the same name, they both get
|
||||
// linked correctly.
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
|
||||
// This should link to the inherent impl
|
||||
write-into: (".search-input", "ZyxwvutMethodDisambiguation -> bool")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "ZyxwvutMethodDisambiguation -> bool"})
|
||||
// Check the disambiguated link.
|
||||
assert-count: ("a.result-method", 1)
|
||||
assert-attribute: ("a.result-method", {
|
||||
@@ -25,11 +22,7 @@ assert: "section:target"
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
|
||||
// This should link to the trait impl
|
||||
write-into: (".search-input", "ZyxwvutMethodDisambiguation, usize -> usize")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "ZyxwvutMethodDisambiguation, usize -> usize"})
|
||||
// Check the disambiguated link.
|
||||
assert-count: ("a.result-method", 1)
|
||||
assert-attribute: ("a.result-method", {
|
||||
@@ -47,6 +40,7 @@ assert: "section:target"
|
||||
// impl block's disambiguator is also acted upon.
|
||||
go-to: "file://" + |DOC_PATH| + "/lib2/index.html?search=MultiImplBlockStruct->bool"
|
||||
wait-for: "#search-tabs"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-count: ("a.result-method", 1)
|
||||
assert-attribute: ("a.result-method", {
|
||||
"href": "../lib2/another_mod/struct.MultiImplBlockStruct.html#impl-MultiImplBlockStruct/method.second_fn"
|
||||
@@ -56,6 +50,7 @@ wait-for: "details:has(summary > #impl-MultiImplBlockStruct-1) > div section[id=
|
||||
|
||||
go-to: "file://" + |DOC_PATH| + "/lib2/index.html?search=MultiImplBlockStruct->u32"
|
||||
wait-for: "#search-tabs"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-count: ("a.result-method", 1)
|
||||
assert-attribute: ("a.result-method", {
|
||||
"href": "../lib2/another_mod/struct.MultiImplBlockStruct.html#impl-MultiImplBlockTrait-for-MultiImplBlockStruct/method.second_fn"
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
// Checks that the "keyword" results have the expected text alongside them.
|
||||
include: "utils.goml"
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
write-into: (".search-input", "for")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "for"})
|
||||
assert-text: (".result-keyword .result-name", "keyword for")
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
// Checks that the search tab results work correctly with function signature syntax
|
||||
// First, try a search-by-name
|
||||
include: "utils.goml"
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
write-into: (".search-input", "Foo")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "Foo"})
|
||||
|
||||
assert-attribute: ("#search-tabs > button:nth-of-type(1)", {"class": "selected"})
|
||||
assert-text: ("#search-tabs > button:nth-of-type(1)", "In Names", STARTS_WITH)
|
||||
assert: "input.search-input:focus"
|
||||
@@ -23,11 +21,7 @@ wait-for-attribute: ("#search-tabs > button:nth-of-type(3)", {"class": "selected
|
||||
|
||||
// Now try search-by-return
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
write-into: (".search-input", "-> String")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "-> String"})
|
||||
assert-attribute: ("#search-tabs > button:nth-of-type(1)", {"class": "selected"})
|
||||
assert-text: ("#search-tabs > button:nth-of-type(1)", "In Function Return Types", STARTS_WITH)
|
||||
assert: "input.search-input:focus"
|
||||
@@ -45,30 +39,18 @@ wait-for-attribute: ("#search-tabs > button:nth-of-type(1)", {"class": "selected
|
||||
|
||||
// Try with a search-by-return with no results
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
write-into: (".search-input", "-> Something")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "-> Something"})
|
||||
assert-attribute: ("#search-tabs > button:nth-of-type(1)", {"class": "selected"})
|
||||
assert-text: ("#search-tabs > button:nth-of-type(1)", "In Function Return Types", STARTS_WITH)
|
||||
|
||||
// Try with a search-by-parameter
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
write-into: (".search-input", "usize,pattern")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "usize,pattern"})
|
||||
assert-attribute: ("#search-tabs > button:nth-of-type(1)", {"class": "selected"})
|
||||
assert-text: ("#search-tabs > button:nth-of-type(1)", "In Function Parameters", STARTS_WITH)
|
||||
|
||||
// Try with a search-by-parameter-and-return
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
write-into: (".search-input", "pattern -> str")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
// Waiting for the search results to appear...
|
||||
wait-for: "#search-tabs"
|
||||
call-function: ("perform-search", {"query": "pattern -> str"})
|
||||
assert-attribute: ("#search-tabs > button:nth-of-type(1)", {"class": "selected"})
|
||||
assert-text: ("#search-tabs > button:nth-of-type(1)", "In Function Signatures", STARTS_WITH)
|
||||
|
||||
@@ -15,7 +15,8 @@ define-function: (
|
||||
focus: ".search-input"
|
||||
press-key: "Enter"
|
||||
|
||||
wait-for: "#search-tabs"
|
||||
wait-for: "#search-tabs .count"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
assert-css: ("#search-tabs > button:not(.selected)", {
|
||||
"background-color": |background|,
|
||||
"border-bottom": |border_bottom|,
|
||||
|
||||
@@ -5,10 +5,7 @@ go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
store-value: (title, "test_docs - Rust")
|
||||
assert-document-property: {"title": |title|}
|
||||
|
||||
write-into: (".search-input", "test")
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
wait-for: "#crate-search"
|
||||
call-function: ("perform-search", {"query": "test"})
|
||||
|
||||
assert-document-property: {"title": '"test" Search - Rust'}
|
||||
|
||||
@@ -16,6 +13,7 @@ set-property: (".search-input", {"value": "another one"})
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
wait-for: "#crate-search"
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
|
||||
assert-document-property: {"title": '"another one" Search - Rust'}
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ define-function: (
|
||||
[storage_value, setting_attribute_value, toggle_attribute_value],
|
||||
block {
|
||||
assert-local-storage: {"rustdoc-auto-hide-large-items": |storage_value|}
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-property: ("#auto-hide-large-items", {"checked": |setting_attribute_value|})
|
||||
assert-attribute: (".item-decl .type-contents-toggle", {"open": |toggle_attribute_value|})
|
||||
|
||||
@@ -6,7 +6,7 @@ define-function: (
|
||||
[storage_value, setting_attribute_value, toggle_attribute_value],
|
||||
block {
|
||||
assert-local-storage: {"rustdoc-auto-hide-method-docs": |storage_value|}
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-property: ("#auto-hide-method-docs", {"checked": |setting_attribute_value|})
|
||||
assert-attribute: (".toggle.method-toggle", {"open": |toggle_attribute_value|})
|
||||
|
||||
@@ -5,7 +5,7 @@ define-function: (
|
||||
[storage_value, setting_attribute_value, toggle_attribute_value],
|
||||
block {
|
||||
assert-local-storage: {"rustdoc-auto-hide-trait-implementations": |storage_value|}
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-property: ("#auto-hide-trait-implementations", {"checked": |setting_attribute_value|})
|
||||
assert-attribute: ("#trait-implementations-list > details", {"open": |toggle_attribute_value|}, ALL)
|
||||
|
||||
@@ -5,7 +5,7 @@ define-function: (
|
||||
[storage_value, setting_attribute_value],
|
||||
block {
|
||||
assert-local-storage: {"rustdoc-go-to-only-result": |storage_value|}
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-property: ("#go-to-only-result", {"checked": |setting_attribute_value|})
|
||||
}
|
||||
@@ -25,7 +25,7 @@ wait-for: "#search"
|
||||
assert-document-property: ({"URL": "/lib2/index.html"}, CONTAINS)
|
||||
|
||||
// Now we change its value.
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
click: "#go-to-only-result"
|
||||
assert-local-storage: {"rustdoc-go-to-only-result": "true"}
|
||||
|
||||
@@ -9,7 +9,7 @@ define-function: (
|
||||
[theme, filter],
|
||||
block {
|
||||
call-function: ("switch-theme", {"theme": |theme|})
|
||||
assert-css: ("#settings-menu > a::before", {
|
||||
assert-css: ("rustdoc-toolbar .settings-menu > a::before", {
|
||||
"filter": |filter|,
|
||||
"width": "18px",
|
||||
"height": "18px",
|
||||
|
||||
@@ -5,7 +5,7 @@ show-text: true // needed when we check for colors below.
|
||||
// First, we check that the settings page doesn't exist.
|
||||
assert-false: "#settings"
|
||||
// We now click on the settings button.
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-css: ("#settings", {"display": "block"})
|
||||
|
||||
@@ -13,11 +13,11 @@ assert-css: ("#settings", {"display": "block"})
|
||||
store-css: (".setting-line", {"margin": setting_line_margin})
|
||||
|
||||
// Let's close it by clicking on the same button.
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for-css: ("#settings", {"display": "none"})
|
||||
|
||||
// Let's check that pressing "ESCAPE" is closing it.
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for-css: ("#settings", {"display": "block"})
|
||||
press-key: "Escape"
|
||||
wait-for-css: ("#settings", {"display": "none"})
|
||||
@@ -28,7 +28,7 @@ write: "test"
|
||||
// To be SURE that the search will be run.
|
||||
press-key: 'Enter'
|
||||
wait-for: "#alternative-display #search"
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for-css: ("#settings", {"display": "block"})
|
||||
// Ensure that the search is still displayed.
|
||||
wait-for: "#alternative-display #search"
|
||||
@@ -41,7 +41,7 @@ set-local-storage: {"rustdoc-theme": "dark", "rustdoc-use-system-theme": "false"
|
||||
// We reload the page so the local storage settings are being used.
|
||||
reload:
|
||||
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
|
||||
// We check that the "Use system theme" is disabled.
|
||||
@@ -55,7 +55,7 @@ assert: "#preferred-light-theme.setting-line.hidden"
|
||||
assert-property: ("#theme .setting-radio-choices #theme-dark", {"checked": "true"})
|
||||
|
||||
// Some style checks...
|
||||
move-cursor-to: "#settings-menu > a"
|
||||
move-cursor-to: "rustdoc-toolbar .settings-menu > a"
|
||||
// First we check the "default" display for radio buttons.
|
||||
assert-css: (
|
||||
"#theme-dark",
|
||||
@@ -194,7 +194,7 @@ assert-css: (
|
||||
"border-width": "2px",
|
||||
},
|
||||
)
|
||||
move-cursor-to: "#settings-menu > a"
|
||||
move-cursor-to: "rustdoc-toolbar .settings-menu > a"
|
||||
// Let's now check with the focus for toggles.
|
||||
focus: "#auto-hide-large-items"
|
||||
assert-css: (
|
||||
@@ -273,43 +273,43 @@ assert-local-storage: {"rustdoc-disable-shortcuts": "true"}
|
||||
press-key: "Escape"
|
||||
press-key: "?"
|
||||
assert-false: "#help-button .popover"
|
||||
wait-for-css: ("#settings-menu .popover", {"display": "block"})
|
||||
wait-for-css: ("rustdoc-toolbar .settings-menu .popover", {"display": "block"})
|
||||
|
||||
// Now turn keyboard shortcuts back on, and see if they work.
|
||||
click: "#disable-shortcuts"
|
||||
assert-local-storage: {"rustdoc-disable-shortcuts": "false"}
|
||||
press-key: "Escape"
|
||||
press-key: "?"
|
||||
wait-for-css: ("#help-button .popover", {"display": "block"})
|
||||
assert-css: ("#settings-menu .popover", {"display": "none"})
|
||||
wait-for-css: ("rustdoc-toolbar .help-menu .popover", {"display": "block"})
|
||||
assert-css: ("rustdoc-toolbar .settings-menu .popover", {"display": "none"})
|
||||
|
||||
// Now switch back to the settings popover, and make sure the keyboard
|
||||
// shortcut works when a check box is selected.
|
||||
click: "#settings-menu > a"
|
||||
wait-for-css: ("#settings-menu .popover", {"display": "block"})
|
||||
click: "rustdoc-toolbar .settings-menu > a"
|
||||
wait-for-css: ("rustdoc-toolbar .settings-menu .popover", {"display": "block"})
|
||||
focus: "#auto-hide-large-items"
|
||||
press-key: "?"
|
||||
wait-for-css: ("#settings-menu .popover", {"display": "none"})
|
||||
wait-for-css: ("#help-button .popover", {"display": "block"})
|
||||
wait-for-css: ("rustdoc-toolbar .settings-menu .popover", {"display": "none"})
|
||||
wait-for-css: ("rustdoc-toolbar .help-menu .popover", {"display": "block"})
|
||||
|
||||
// Now switch back to the settings popover, and make sure the keyboard
|
||||
// shortcut works when a check box is selected.
|
||||
click: "#settings-menu > a"
|
||||
wait-for-css: ("#settings-menu .popover", {"display": "block"})
|
||||
wait-for-css: ("#help-button .popover", {"display": "none"})
|
||||
click: "rustdoc-toolbar .settings-menu > a"
|
||||
wait-for-css: ("rustdoc-toolbar .settings-menu .popover", {"display": "block"})
|
||||
assert-false: "rustdoc-toolbar .help-menu .popover"
|
||||
focus: "#theme-system-preference"
|
||||
press-key: "?"
|
||||
wait-for-css: ("#settings-menu .popover", {"display": "none"})
|
||||
wait-for-css: ("#help-button .popover", {"display": "block"})
|
||||
wait-for-css: ("rustdoc-toolbar .settings-menu .popover", {"display": "none"})
|
||||
wait-for-css: ("rustdoc-toolbar .help-menu .popover", {"display": "block"})
|
||||
|
||||
// Now we go to the settings page to check that the CSS is loaded as expected.
|
||||
go-to: "file://" + |DOC_PATH| + "/settings.html"
|
||||
wait-for: "#settings"
|
||||
assert-false: "#settings-menu"
|
||||
assert-false: "rustdoc-toolbar .settings-menu"
|
||||
assert-css: (".setting-radio", {"cursor": "pointer"})
|
||||
|
||||
assert-attribute-false: ("#settings", {"class": "popover"}, CONTAINS)
|
||||
compare-elements-position: (".sub form", "#settings", ["x"])
|
||||
compare-elements-position: (".main-heading", "#settings", ["x"])
|
||||
|
||||
// Check that setting-line has the same margin in this mode as in the popover.
|
||||
assert-css: (".setting-line", {"margin": |setting_line_margin|})
|
||||
|
||||
@@ -8,9 +8,9 @@ press-key: "Escape"
|
||||
assert-false: "input.search-input:focus"
|
||||
// We now check for the help popup.
|
||||
press-key: "?"
|
||||
assert-css: ("#help-button .popover", {"display": "block"})
|
||||
assert-css: ("rustdoc-toolbar .help-menu .popover", {"display": "block"})
|
||||
press-key: "Escape"
|
||||
assert-css: ("#help-button .popover", {"display": "none"})
|
||||
assert-false: "rustdoc-toolbar .help-menu .popover"
|
||||
// Checking doc collapse and expand.
|
||||
// It should be displaying a "-":
|
||||
assert-text: ("#toggle-all-docs", "Summary")
|
||||
|
||||
@@ -17,7 +17,7 @@ assert-css: (".sidebar", {"display": "block", "left": "-1000px"})
|
||||
focus: ".sidebar-elems h3 a"
|
||||
assert-css: (".sidebar", {"display": "block", "left": "0px"})
|
||||
// When we tab out of the sidebar, close it.
|
||||
focus: ".search-input"
|
||||
focus: "#search-button"
|
||||
assert-css: (".sidebar", {"display": "block", "left": "-1000px"})
|
||||
|
||||
// Open the sidebar menu.
|
||||
@@ -43,7 +43,7 @@ press-key: "Escape"
|
||||
assert-css: (".sidebar", {"display": "block", "left": "-1000px"})
|
||||
|
||||
// Check that the topbar is visible
|
||||
assert-property: (".mobile-topbar", {"clientHeight": "45"})
|
||||
assert-property: ("rustdoc-topbar", {"clientHeight": "45"})
|
||||
|
||||
// Check that clicking an element from the sidebar scrolls to the right place
|
||||
// so the target is not obscured by the topbar.
|
||||
@@ -54,7 +54,7 @@ assert-position: ("#method\.must_use", {"y": 46})
|
||||
// Check that the bottom-most item on the sidebar menu can be scrolled fully into view.
|
||||
click: ".sidebar-menu-toggle"
|
||||
scroll-to: ".block.keyword li:nth-child(1)"
|
||||
compare-elements-position-near: (".block.keyword li:nth-child(1)", ".mobile-topbar", {"y": 544})
|
||||
compare-elements-position-near: (".block.keyword li:nth-child(1)", "rustdoc-topbar", {"y": 544})
|
||||
|
||||
// Now checking the background color of the sidebar.
|
||||
// Close the sidebar menu.
|
||||
@@ -65,7 +65,7 @@ define-function: (
|
||||
"check-colors",
|
||||
[theme, color, background],
|
||||
block {
|
||||
call-function: ("switch-theme", {"theme": |theme|})
|
||||
call-function: ("switch-theme-mobile", {"theme": |theme|})
|
||||
reload:
|
||||
|
||||
// Open the sidebar menu.
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
assert-property: (".sidebar", {"clientWidth": "199"})
|
||||
show-text: true
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-css: ("#settings", {"display": "block"})
|
||||
// normal resizing
|
||||
@@ -12,7 +12,7 @@ assert-css: ("#settings", {"display": "none"})
|
||||
|
||||
// Now same thing, but for source code
|
||||
go-to: "file://" + |DOC_PATH| + "/src/test_docs/lib.rs.html"
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-css: ("#settings", {"display": "block"})
|
||||
assert-property: (".sidebar", {"clientWidth": "49"})
|
||||
|
||||
@@ -4,7 +4,7 @@ assert-property: (".sidebar", {"clientWidth": "199"})
|
||||
show-text: true
|
||||
|
||||
// Verify that the "hide" option is unchecked
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#settings"
|
||||
assert-css: ("#settings", {"display": "block"})
|
||||
assert-property: ("#hide-sidebar", {"checked": "false"})
|
||||
@@ -15,7 +15,7 @@ drag-and-drop: ((205, 100), (5, 100))
|
||||
assert-css: (".sidebar", {"display": "none"})
|
||||
|
||||
// Verify that the "hide" option is checked
|
||||
focus: "#settings-menu a"
|
||||
focus: "rustdoc-toolbar .settings-menu a"
|
||||
press-key: "Enter"
|
||||
wait-for-css: ("#settings", {"display": "block"})
|
||||
assert-property: ("#hide-sidebar", {"checked": "true"})
|
||||
@@ -24,28 +24,28 @@ wait-for-css: (".sidebar", {"display": "block"})
|
||||
|
||||
// Verify that hiding the sidebar hides the source sidebar
|
||||
// and puts the button in static position mode on mobile
|
||||
go-to: "file://" + |DOC_PATH| + "/src/test_docs/lib.rs.html"
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
set-window-size: (600, 600)
|
||||
focus: "#settings-menu a"
|
||||
focus: "rustdoc-topbar .settings-menu a"
|
||||
press-key: "Enter"
|
||||
wait-for-css: ("#settings", {"display": "block"})
|
||||
wait-for-css: ("#sidebar-button", {"position": "static"})
|
||||
assert-property: ("#hide-sidebar", {"checked": "false"})
|
||||
click: "#hide-sidebar"
|
||||
wait-for-css: (".sidebar", {"display": "none"})
|
||||
wait-for-css: ("#sidebar-button", {"position": "fixed"})
|
||||
store-position: ("#sidebar-button", {
|
||||
"y": sidebar_button_y,
|
||||
"x": sidebar_button_x,
|
||||
})
|
||||
assert-property: ("#hide-sidebar", {"checked": "false"})
|
||||
click: "#hide-sidebar"
|
||||
wait-for-css: (".sidebar", {"display": "none"})
|
||||
wait-for-css: ("#sidebar-button", {"position": "static"})
|
||||
assert-position: ("#sidebar-button", {
|
||||
"y": |sidebar_button_y|,
|
||||
"x": |sidebar_button_x|,
|
||||
})
|
||||
assert-property: ("#hide-sidebar", {"checked": "true"})
|
||||
press-key: "Escape"
|
||||
// Clicking the sidebar button should work, and implicitly re-enable
|
||||
// the persistent navigation bar
|
||||
wait-for-css: ("#settings", {"display": "none"})
|
||||
assert-position: ("#sidebar-button", {
|
||||
"y": |sidebar_button_y|,
|
||||
"x": |sidebar_button_x|,
|
||||
})
|
||||
click: "#sidebar-button"
|
||||
wait-for-css: (".sidebar", {"display": "block"})
|
||||
|
||||
@@ -141,7 +141,7 @@ click: "#sidebar-button"
|
||||
wait-for-css: (".src .sidebar > *", {"visibility": "hidden"})
|
||||
// We scroll to line 117 to change the scroll position.
|
||||
scroll-to: '//*[@id="117"]'
|
||||
store-value: (y_offset, "2578")
|
||||
store-value: (y_offset, "2567")
|
||||
assert-window-property: {"pageYOffset": |y_offset|}
|
||||
// Expanding the sidebar...
|
||||
click: "#sidebar-button"
|
||||
|
||||
@@ -85,4 +85,4 @@ assert-false: ".src-sidebar-expanded"
|
||||
assert: "nav.sidebar"
|
||||
|
||||
// Check that the topbar is not visible
|
||||
assert-false: ".mobile-topbar"
|
||||
assert-false: "rustdoc-topbar"
|
||||
|
||||
@@ -200,7 +200,7 @@ drag-and-drop: ((205, 100), (108, 100))
|
||||
assert-position: (".sidebar-crate > h2 > a", {"x": -3})
|
||||
|
||||
// Check that the mobile sidebar and the source sidebar use the same icon.
|
||||
store-css: (".mobile-topbar .sidebar-menu-toggle::before", {"content": image_url})
|
||||
store-css: ("rustdoc-topbar .sidebar-menu-toggle::before", {"content": image_url})
|
||||
// Then we go to a source page.
|
||||
click: ".main-heading .src"
|
||||
assert-css: ("#sidebar-button a::before", {"content": |image_url|})
|
||||
@@ -212,7 +212,7 @@ assert: |sidebar_background| != |sidebar_background_hover|
|
||||
click: "#sidebar-button a"
|
||||
wait-for: "html.src-sidebar-expanded"
|
||||
assert-css: ("#sidebar-button a:hover", {"background-color": |sidebar_background_hover|})
|
||||
move-cursor-to: "#settings-menu"
|
||||
move-cursor-to: "#search-button"
|
||||
assert-css: ("#sidebar-button a:not(:hover)", {"background-color": |sidebar_background|})
|
||||
// Closing sidebar.
|
||||
click: "#sidebar-button a"
|
||||
@@ -220,7 +220,7 @@ wait-for: "html:not(.src-sidebar-expanded)"
|
||||
// Now we check the same when the sidebar button is moved alongside the search.
|
||||
set-window-size: (500, 500)
|
||||
store-css: ("#sidebar-button a:hover", {"background-color": not_sidebar_background_hover})
|
||||
move-cursor-to: "#settings-menu"
|
||||
move-cursor-to: "rustdoc-toolbar #search-button"
|
||||
store-css: ("#sidebar-button a:not(:hover)", {"background-color": not_sidebar_background})
|
||||
// The sidebar background is supposed to be the same as the main background.
|
||||
assert-css: ("body", {"background-color": |not_sidebar_background|})
|
||||
|
||||
@@ -8,13 +8,13 @@ set-window-size: (600, 800)
|
||||
assert-property: ("html", {"scrollTop": "0"})
|
||||
|
||||
click: '//a[text() = "barbar" and @href="#5-7"]'
|
||||
assert-property: ("html", {"scrollTop": "206"})
|
||||
assert-property: ("html", {"scrollTop": "195"})
|
||||
click: '//a[text() = "bar" and @href="#28-36"]'
|
||||
assert-property: ("html", {"scrollTop": "239"})
|
||||
assert-property: ("html", {"scrollTop": "228"})
|
||||
click: '//a[normalize-space() = "sub_fn" and @href="#2-4"]'
|
||||
assert-property: ("html", {"scrollTop": "134"})
|
||||
assert-property: ("html", {"scrollTop": "123"})
|
||||
|
||||
// We now check that clicking on lines doesn't change the scroll
|
||||
// Extra information: the "sub_fn" function header is on line 1.
|
||||
click: '//*[@id="6"]'
|
||||
assert-property: ("html", {"scrollTop": "134"})
|
||||
assert-property: ("html", {"scrollTop": "123"})
|
||||
|
||||
@@ -89,9 +89,9 @@ assert-css: ("a[data-nosnippet]", {"text-align": "right"}, ALL)
|
||||
// do anything (and certainly not add a `#NaN` to the URL!).
|
||||
go-to: "file://" + |DOC_PATH| + "/src/test_docs/lib.rs.html"
|
||||
// We use this assert-position to know where we will click.
|
||||
assert-position: ("//*[@id='1']", {"x": 81, "y": 169})
|
||||
// We click on the left of the "1" anchor but still in the `a[data-nosnippet]`.
|
||||
click: (77, 163)
|
||||
assert-position: ("//*[@id='1']", {"x": 81, "y": 141})
|
||||
// We click on the left of the "1" anchor but still in the "src-line-number" `<pre>`.
|
||||
click: (135, 77)
|
||||
assert-document-property: ({"URL": "/lib.rs.html"}, ENDS_WITH)
|
||||
|
||||
// Checking the source code sidebar.
|
||||
@@ -156,27 +156,8 @@ call-function: ("check-sidebar-dir-entry", {
|
||||
"y": |source_sidebar_title_y| + |source_sidebar_title_height| + 7,
|
||||
})
|
||||
|
||||
// Check the search form
|
||||
assert-css: ("nav.sub", {"flex-direction": "row"})
|
||||
// The goal of this test is to ensure the search input is perfectly centered
|
||||
// between the top of the page and the header.
|
||||
// To check this, we maintain the invariant:
|
||||
//
|
||||
// offsetTop[nav.sub form] = offsetTop[#main-content] - offsetHeight[nav.sub form] - offsetTop[nav.sub form]
|
||||
assert-position: ("nav.sub form", {"y": 15})
|
||||
assert-property: ("nav.sub form", {"offsetHeight": 34})
|
||||
assert-position: ("h1", {"y": 68})
|
||||
// 15 = 64 - 34 - 15
|
||||
|
||||
// Now do the same check on moderately-sized, tablet mobile.
|
||||
set-window-size: (700, 700)
|
||||
assert-css: ("nav.sub", {"flex-direction": "row"})
|
||||
assert-position: ("nav.sub form", {"y": 8})
|
||||
assert-property: ("nav.sub form", {"offsetHeight": 34})
|
||||
assert-position: ("h1", {"y": 54})
|
||||
// 8 = 50 - 34 - 8
|
||||
|
||||
// Check the sidebar directory entries have a marker and spacing (tablet).
|
||||
set-window-size: (700, 700)
|
||||
store-property: (".src-sidebar-title", {
|
||||
"offsetHeight": source_sidebar_title_height,
|
||||
"offsetTop": source_sidebar_title_y,
|
||||
@@ -187,11 +168,8 @@ call-function: ("check-sidebar-dir-entry", {
|
||||
"y": |source_sidebar_title_y| + |source_sidebar_title_height| + 7,
|
||||
})
|
||||
|
||||
// Tiny, phone mobile gets a different display where the logo is stacked on top.
|
||||
set-window-size: (450, 700)
|
||||
assert-css: ("nav.sub", {"flex-direction": "column"})
|
||||
|
||||
// Check the sidebar directory entries have a marker and spacing (phone).
|
||||
set-window-size: (450, 700)
|
||||
store-property: (".src-sidebar-title", {
|
||||
"offsetHeight": source_sidebar_title_height,
|
||||
"offsetTop": source_sidebar_title_y,
|
||||
|
||||
@@ -13,7 +13,7 @@ define-function: (
|
||||
)
|
||||
|
||||
store-size: (".rust code", {"width": width, "height": height})
|
||||
click: "#settings-menu"
|
||||
click: "main .settings-menu"
|
||||
wait-for: "#settings"
|
||||
call-function: ("click-code-wrapping", {"expected": "true"})
|
||||
wait-for-size-false: (".rust code", {"width": |width|, "height": |height|})
|
||||
@@ -28,7 +28,7 @@ assert-size: (".rust code", {"width": |width|, "height": |height|})
|
||||
|
||||
// Now let's check in docs code examples.
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/trait_bounds/index.html"
|
||||
click: "#settings-menu"
|
||||
click: "main .settings-menu"
|
||||
wait-for: "#settings"
|
||||
|
||||
store-property: (".example-wrap .rust code", {"scrollWidth": rust_width, "scrollHeight": rust_height})
|
||||
|
||||
@@ -7,7 +7,7 @@ store-value: (background_light, "white")
|
||||
store-value: (background_dark, "#353535")
|
||||
store-value: (background_ayu, "#0f1419")
|
||||
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#theme-ayu"
|
||||
click: "#theme-ayu"
|
||||
// should be the ayu theme so let's check the color.
|
||||
@@ -75,7 +75,7 @@ store-value: (background_dark, "#353535")
|
||||
store-value: (background_ayu, "#0f1419")
|
||||
store-value: (background_custom_theme, "red")
|
||||
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#theme-ayu"
|
||||
click: "#theme-ayu"
|
||||
// should be the ayu theme so let's check the color.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// Ensure that the theme picker always starts with the actual defaults.
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#theme-system-preference"
|
||||
assert: "#theme-system-preference:checked"
|
||||
assert: "#preferred-light-theme-light:checked"
|
||||
@@ -16,7 +16,7 @@ set-local-storage: {
|
||||
"rustdoc-theme": "ayu"
|
||||
}
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html"
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
wait-for: "#theme-system-preference"
|
||||
assert: "#theme-system-preference:checked"
|
||||
assert: "#preferred-light-theme-light:checked"
|
||||
|
||||
@@ -13,4 +13,4 @@ assert-attribute-false: (".impl-items .toggle", {"open": ""})
|
||||
// Click the "Trait" part of "impl Trait" and verify it navigates.
|
||||
click: "#impl-Trait-for-Foo h3 a:first-of-type"
|
||||
assert-text: (".main-heading .rustdoc-breadcrumbs", "lib2")
|
||||
assert-text: (".main-heading h1", "Trait TraitCopy item path")
|
||||
assert-text: (".main-heading h1", "Trait Trait Copy item path")
|
||||
|
||||
@@ -3,12 +3,12 @@
|
||||
go-to: "file://" + |DOC_PATH| + "/test_docs/struct.Foo.html"
|
||||
set-window-size: (433, 600)
|
||||
assert-attribute: (".top-doc", {"open": ""})
|
||||
click: (4, 270) // This is the position of the top doc comment toggle
|
||||
click: (4, 230) // This is the position of the top doc comment toggle
|
||||
assert-attribute-false: (".top-doc", {"open": ""})
|
||||
click: (4, 270)
|
||||
click: (4, 230)
|
||||
assert-attribute: (".top-doc", {"open": ""})
|
||||
// To ensure that the toggle isn't over the text, we check that the toggle isn't clicked.
|
||||
click: (3, 270)
|
||||
click: (3, 230)
|
||||
assert-attribute: (".top-doc", {"open": ""})
|
||||
|
||||
// Assert the position of the toggle on the top doc block.
|
||||
@@ -24,12 +24,12 @@ assert-position: (
|
||||
// Now we do the same but with a little bigger width
|
||||
set-window-size: (600, 600)
|
||||
assert-attribute: (".top-doc", {"open": ""})
|
||||
click: (4, 270) // New Y position since all search elements are back on one line.
|
||||
click: (4, 230) // New Y position since all search elements are back on one line.
|
||||
assert-attribute-false: (".top-doc", {"open": ""})
|
||||
click: (4, 270)
|
||||
click: (4, 230)
|
||||
assert-attribute: (".top-doc", {"open": ""})
|
||||
// To ensure that the toggle isn't over the text, we check that the toggle isn't clicked.
|
||||
click: (3, 270)
|
||||
click: (3, 230)
|
||||
assert-attribute: (".top-doc", {"open": ""})
|
||||
|
||||
// Same check on trait items.
|
||||
|
||||
@@ -64,7 +64,7 @@ define-function: (
|
||||
"filter": |filter|,
|
||||
})
|
||||
// moving the cursor somewhere else to not mess with next function calls.
|
||||
move-cursor-to: ".search-input"
|
||||
move-cursor-to: "#search-button"
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -47,27 +47,27 @@ assert-property: ("pre.item-decl", {"scrollWidth": "950"})
|
||||
set-window-size: (600, 600)
|
||||
go-to: "file://" + |DOC_PATH| + "/lib2/too_long/struct.SuperIncrediblyLongLongLongLongLongLongLongGigaGigaGigaMegaLongLongLongStructName.html"
|
||||
// It shouldn't have an overflow in the topbar either.
|
||||
store-property: (".mobile-topbar", {"scrollWidth": scrollWidth})
|
||||
assert-property: (".mobile-topbar", {"clientWidth": |scrollWidth|})
|
||||
assert-css: (".mobile-topbar h2", {"overflow-x": "hidden"})
|
||||
store-property: ("rustdoc-topbar", {"scrollWidth": scrollWidth})
|
||||
assert-property: ("rustdoc-topbar", {"clientWidth": |scrollWidth|}, NEAR)
|
||||
assert-css: ("rustdoc-topbar h2", {"overflow-x": "hidden"})
|
||||
|
||||
// Check that main heading and toolbar go side-by-side, both on desktop and on mobile.
|
||||
set-window-size: (1100, 800)
|
||||
go-to: "file://" + |DOC_PATH| + "/lib2/too_long/struct.SuperIncrediblyLongLongLongLongLongLongLongGigaGigaGigaMegaLongLongLongStructName.html"
|
||||
compare-elements-position: (".main-heading h1", ".main-heading rustdoc-toolbar", ["y"])
|
||||
compare-elements-position-near-false: (".main-heading h1", ".main-heading rustdoc-toolbar", {"x": 550})
|
||||
compare-elements-position: (".main-heading h1", ".main-heading rustdoc-toolbar #search-button", ["y"])
|
||||
compare-elements-position-near-false: (".main-heading h1", ".main-heading rustdoc-toolbar #search-button", {"x": 300})
|
||||
go-to: "file://" + |DOC_PATH| + "/lib2/index.html"
|
||||
compare-elements-position: (".main-heading h1", ".main-heading rustdoc-toolbar", ["y"])
|
||||
compare-elements-position-near-false: (".main-heading h1", ".main-heading rustdoc-toolbar", {"x": 550})
|
||||
compare-elements-position: (".main-heading h1", ".main-heading rustdoc-toolbar #search-button", ["y"])
|
||||
compare-elements-position-near-false: (".main-heading h1", ".main-heading rustdoc-toolbar #search-button", {"x": 300})
|
||||
|
||||
// On mobile, they always wrap.
|
||||
set-window-size: (600, 600)
|
||||
go-to: "file://" + |DOC_PATH| + "/lib2/too_long/struct.SuperIncrediblyLongLongLongLongLongLongLongGigaGigaGigaMegaLongLongLongStructName.html"
|
||||
compare-elements-position: (".main-heading h1", ".main-heading rustdoc-toolbar", ["y"])
|
||||
compare-elements-position-near-false: (".main-heading h1", ".main-heading rustdoc-toolbar", {"x": 200})
|
||||
compare-elements-position: (".main-heading h1", ".main-heading rustdoc-toolbar #search-button", ["y"])
|
||||
compare-elements-position-near-false: (".main-heading h1", ".main-heading rustdoc-toolbar #search-button", {"x": 200})
|
||||
go-to: "file://" + |DOC_PATH| + "/lib2/index.html"
|
||||
compare-elements-position: (".main-heading h1", ".main-heading rustdoc-toolbar", ["y"])
|
||||
compare-elements-position-near-false: (".main-heading h1", ".main-heading rustdoc-toolbar", {"x": 200})
|
||||
compare-elements-position: (".main-heading h1", ".main-heading rustdoc-toolbar #search-button", ["y"])
|
||||
compare-elements-position-near-false: (".main-heading h1", ".main-heading rustdoc-toolbar #search-button", {"x": 200})
|
||||
|
||||
// Now we will check that the scrolling is working.
|
||||
// First on an item with "hidden methods".
|
||||
|
||||
@@ -5,14 +5,47 @@ define-function: (
|
||||
block {
|
||||
// Set the theme.
|
||||
// Open the settings menu.
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
// Wait for the popover to appear...
|
||||
wait-for: "#settings"
|
||||
// Change the setting.
|
||||
click: "#theme-"+ |theme|
|
||||
// Close the popover.
|
||||
click: "#settings-menu"
|
||||
click: "rustdoc-toolbar .settings-menu"
|
||||
// Ensure that the local storage was correctly updated.
|
||||
assert-local-storage: {"rustdoc-theme": |theme|}
|
||||
},
|
||||
)
|
||||
|
||||
define-function: (
|
||||
"switch-theme-mobile",
|
||||
[theme],
|
||||
block {
|
||||
// Set the theme.
|
||||
// Open the settings menu.
|
||||
click: "rustdoc-topbar .settings-menu"
|
||||
// Wait for the popover to appear...
|
||||
wait-for: "#settings"
|
||||
// Change the setting.
|
||||
click: "#theme-"+ |theme|
|
||||
// Close the popover.
|
||||
click: "rustdoc-topbar .settings-menu"
|
||||
// Ensure that the local storage was correctly updated.
|
||||
assert-local-storage: {"rustdoc-theme": |theme|}
|
||||
},
|
||||
)
|
||||
|
||||
define-function: (
|
||||
"perform-search",
|
||||
[query],
|
||||
block {
|
||||
click: "#search-button"
|
||||
wait-for: ".search-input"
|
||||
write-into: (".search-input", |query|)
|
||||
press-key: 'Enter'
|
||||
// wait for the search to start
|
||||
wait-for: "#search-tabs"
|
||||
// then wait for it to finish
|
||||
wait-for-false: "#search-tabs .count.loading"
|
||||
}
|
||||
)
|
||||
|
||||
@@ -6,5 +6,10 @@ const EXPECTED = {
|
||||
'name': 'reference',
|
||||
'desc': "References, <code>&T</code> and <code>&mut T</code>.",
|
||||
},
|
||||
{
|
||||
'path': 'std::ops',
|
||||
'name': 'BitAnd',
|
||||
'desc': "The bitwise AND operator <code>&</code>.",
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
const EXPECTED = {
|
||||
'query': '+',
|
||||
'others': [
|
||||
{ 'path': 'std::ops', 'name': 'AddAssign' },
|
||||
{ 'path': 'std::ops', 'name': 'Add' },
|
||||
{ 'path': 'core::ops', 'name': 'AddAssign' },
|
||||
{ 'path': 'core::ops', 'name': 'Add' },
|
||||
{ 'path': 'std::ops', 'name': 'AddAssign' },
|
||||
],
|
||||
};
|
||||
|
||||
@@ -9,6 +9,6 @@ const EXPECTED = {
|
||||
{ 'path': 'std::str', 'name': 'eq' },
|
||||
],
|
||||
'returned': [
|
||||
{ 'path': 'std::string::String', 'name': 'add' },
|
||||
{ 'path': 'std::string::String', 'name': 'new' },
|
||||
],
|
||||
};
|
||||
|
||||
@@ -20,12 +20,12 @@ const PARSED = [
|
||||
pathLast: "c",
|
||||
normalizedPathLast: "c",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
]
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
@@ -51,11 +51,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "c",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}]
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
@@ -81,11 +81,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "never",
|
||||
generics: [],
|
||||
typeFilter: 1,
|
||||
typeFilter: "primitive",
|
||||
}]
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
@@ -111,11 +111,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "[]",
|
||||
generics: [],
|
||||
typeFilter: 1,
|
||||
typeFilter: "primitive",
|
||||
}]
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
@@ -147,14 +147,14 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "never",
|
||||
generics: [],
|
||||
typeFilter: 1,
|
||||
typeFilter: "primitive",
|
||||
},
|
||||
],
|
||||
typeFilter: 1,
|
||||
typeFilter: "primitive",
|
||||
}]
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
@@ -213,7 +213,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "c",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "X",
|
||||
@@ -221,12 +221,12 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "x",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
|
||||
@@ -406,10 +406,10 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "x",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "y",
|
||||
@@ -417,7 +417,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "y",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 2,
|
||||
@@ -440,7 +440,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "x",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "y",
|
||||
@@ -448,10 +448,10 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "y",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
@@ -468,7 +468,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "p",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "x",
|
||||
@@ -476,7 +476,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "x",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "y",
|
||||
@@ -484,7 +484,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "y",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 3,
|
||||
|
||||
@@ -7,7 +7,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "foo",
|
||||
generics: [],
|
||||
typeFilter: 7,
|
||||
typeFilter: "fn",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "fn:foo",
|
||||
@@ -22,7 +22,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "foo",
|
||||
generics: [],
|
||||
typeFilter: 6,
|
||||
typeFilter: "enum",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "enum : foo",
|
||||
@@ -45,7 +45,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "macro",
|
||||
generics: [],
|
||||
typeFilter: 16,
|
||||
typeFilter: "macro",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "macro!",
|
||||
@@ -60,7 +60,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "mac",
|
||||
generics: [],
|
||||
typeFilter: 16,
|
||||
typeFilter: "macro",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "macro:mac!",
|
||||
@@ -75,7 +75,7 @@ const PARSED = [
|
||||
pathWithoutLast: ["a"],
|
||||
pathLast: "mac",
|
||||
generics: [],
|
||||
typeFilter: 16,
|
||||
typeFilter: "macro",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "a::mac!",
|
||||
@@ -93,7 +93,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "foo",
|
||||
generics: [],
|
||||
typeFilter: 7,
|
||||
typeFilter: "fn",
|
||||
}],
|
||||
error: null,
|
||||
},
|
||||
@@ -114,10 +114,10 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "bar",
|
||||
generics: [],
|
||||
typeFilter: 7,
|
||||
typeFilter: "fn",
|
||||
}
|
||||
],
|
||||
typeFilter: 7,
|
||||
typeFilter: "fn",
|
||||
}],
|
||||
error: null,
|
||||
},
|
||||
@@ -138,7 +138,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "bar",
|
||||
generics: [],
|
||||
typeFilter: 7,
|
||||
typeFilter: "fn",
|
||||
},
|
||||
{
|
||||
name: "baz::fuzz",
|
||||
@@ -146,10 +146,10 @@ const PARSED = [
|
||||
pathWithoutLast: ["baz"],
|
||||
pathLast: "fuzz",
|
||||
generics: [],
|
||||
typeFilter: 6,
|
||||
typeFilter: "enum",
|
||||
},
|
||||
],
|
||||
typeFilter: 7,
|
||||
typeFilter: "fn",
|
||||
}],
|
||||
error: null,
|
||||
},
|
||||
|
||||
@@ -16,7 +16,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "p",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "u8",
|
||||
@@ -24,7 +24,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "u8",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 2,
|
||||
@@ -49,7 +49,7 @@ const PARSED = [
|
||||
generics: [],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
@@ -82,7 +82,7 @@ const PARSED = [
|
||||
],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
@@ -122,7 +122,7 @@ const PARSED = [
|
||||
generics: [],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
@@ -162,7 +162,7 @@ const PARSED = [
|
||||
],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 1,
|
||||
|
||||
@@ -25,11 +25,11 @@ const PARSED = [
|
||||
generics: [],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "(-> F<P>)",
|
||||
@@ -53,11 +53,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "p",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "(-> P)",
|
||||
@@ -81,11 +81,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "(->,a)",
|
||||
@@ -113,7 +113,7 @@ const PARSED = [
|
||||
generics: [],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
bindings: [
|
||||
[
|
||||
@@ -121,7 +121,7 @@ const PARSED = [
|
||||
[],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "(F<P> ->)",
|
||||
@@ -141,7 +141,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "p",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
bindings: [
|
||||
[
|
||||
@@ -149,7 +149,7 @@ const PARSED = [
|
||||
[],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "(P ->)",
|
||||
@@ -169,7 +169,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
bindings: [
|
||||
[
|
||||
@@ -177,7 +177,7 @@ const PARSED = [
|
||||
[],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "(,a->)",
|
||||
@@ -197,7 +197,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "aaaaa",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
bindings: [
|
||||
[
|
||||
@@ -208,11 +208,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "(aaaaa->a)",
|
||||
@@ -233,7 +233,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "aaaaa",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "b",
|
||||
@@ -241,7 +241,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "b",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
bindings: [
|
||||
@@ -253,11 +253,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "(aaaaa, b -> a)",
|
||||
@@ -278,7 +278,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "aaaaa",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "b",
|
||||
@@ -286,7 +286,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "b",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
bindings: [
|
||||
@@ -298,11 +298,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: 1,
|
||||
typeFilter: "primitive",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "primitive:(aaaaa, b -> a)",
|
||||
@@ -318,7 +318,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "x",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "->",
|
||||
@@ -332,7 +332,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "aaaaa",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "b",
|
||||
@@ -340,7 +340,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "b",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
bindings: [
|
||||
@@ -352,11 +352,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: 10,
|
||||
typeFilter: "trait",
|
||||
}
|
||||
],
|
||||
foundElems: 2,
|
||||
@@ -390,11 +390,11 @@ const PARSED = [
|
||||
generics: [],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "Fn () -> F<P>",
|
||||
@@ -418,11 +418,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "p",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "FnMut() -> P",
|
||||
@@ -446,11 +446,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "p",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "(FnMut() -> P)",
|
||||
@@ -478,7 +478,7 @@ const PARSED = [
|
||||
generics: [],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
bindings: [
|
||||
[
|
||||
@@ -486,7 +486,7 @@ const PARSED = [
|
||||
[],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "Fn(F<P>)",
|
||||
@@ -507,7 +507,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "aaaaa",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "b",
|
||||
@@ -515,7 +515,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "b",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
bindings: [
|
||||
@@ -527,11 +527,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: 1,
|
||||
typeFilter: "primitive",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "primitive:fnonce(aaaaa, b) -> a",
|
||||
@@ -552,7 +552,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "aaaaa",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "b",
|
||||
@@ -560,7 +560,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "b",
|
||||
generics: [],
|
||||
typeFilter: 0,
|
||||
typeFilter: "keyword",
|
||||
},
|
||||
],
|
||||
bindings: [
|
||||
@@ -572,11 +572,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: 10,
|
||||
typeFilter: "trait",
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: 1,
|
||||
typeFilter: "primitive",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "primitive:fnonce(aaaaa, keyword:b) -> trait:a",
|
||||
@@ -592,7 +592,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "x",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "fn",
|
||||
@@ -612,7 +612,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "aaaaa",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "b",
|
||||
@@ -620,7 +620,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "b",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
bindings: [
|
||||
@@ -632,11 +632,11 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
],
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
bindings: [
|
||||
@@ -645,7 +645,7 @@ const PARSED = [
|
||||
[],
|
||||
]
|
||||
],
|
||||
typeFilter: 10,
|
||||
typeFilter: "trait",
|
||||
}
|
||||
],
|
||||
foundElems: 2,
|
||||
@@ -662,7 +662,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "b",
|
||||
@@ -675,7 +675,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "c",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
bindings: [
|
||||
[
|
||||
@@ -683,7 +683,7 @@ const PARSED = [
|
||||
[],
|
||||
]
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}
|
||||
],
|
||||
foundElems: 2,
|
||||
|
||||
@@ -13,10 +13,10 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "never",
|
||||
generics: [],
|
||||
typeFilter: 1,
|
||||
typeFilter: "primitive",
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "R<!>",
|
||||
@@ -31,7 +31,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "never",
|
||||
generics: [],
|
||||
typeFilter: 1,
|
||||
typeFilter: "primitive",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "!",
|
||||
@@ -46,7 +46,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: 16,
|
||||
typeFilter: "macro",
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "a!",
|
||||
@@ -77,7 +77,7 @@ const PARSED = [
|
||||
pathWithoutLast: ["never"],
|
||||
pathLast: "b",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "!::b",
|
||||
@@ -122,10 +122,10 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "t",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "!::b<T>",
|
||||
|
||||
@@ -15,7 +15,7 @@ const PARSED = [
|
||||
generics: [],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "R<P>",
|
||||
|
||||
@@ -7,7 +7,7 @@ const PARSED = [
|
||||
pathWithoutLast: ["a"],
|
||||
pathLast: "b",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "A::B",
|
||||
@@ -22,7 +22,7 @@ const PARSED = [
|
||||
pathWithoutLast: ["a"],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: 'a:: a',
|
||||
@@ -37,7 +37,7 @@ const PARSED = [
|
||||
pathWithoutLast: ["a"],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: 'a ::a',
|
||||
@@ -52,7 +52,7 @@ const PARSED = [
|
||||
pathWithoutLast: ["a"],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: 'a :: a',
|
||||
@@ -68,7 +68,7 @@ const PARSED = [
|
||||
pathWithoutLast: ["a"],
|
||||
pathLast: "b",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "C",
|
||||
@@ -76,7 +76,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "c",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 2,
|
||||
@@ -101,7 +101,7 @@ const PARSED = [
|
||||
generics: [],
|
||||
},
|
||||
],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
{
|
||||
name: "C",
|
||||
@@ -109,7 +109,7 @@ const PARSED = [
|
||||
pathWithoutLast: [],
|
||||
pathLast: "c",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
},
|
||||
],
|
||||
foundElems: 2,
|
||||
@@ -125,7 +125,7 @@ const PARSED = [
|
||||
pathWithoutLast: ["mod"],
|
||||
pathLast: "a",
|
||||
generics: [],
|
||||
typeFilter: -1,
|
||||
typeFilter: null,
|
||||
}],
|
||||
foundElems: 1,
|
||||
userQuery: "mod::a",
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user