@@ -62,6 +62,6 @@ members = [
 [features]
 tantivy-indexing = ["tantivy", "search/tantivy-indexing"]
-[profile.release]
-lto = true
-codegen-units = 1
+#[profile.release]
+#lto = true
+#codegen-units = 1
@@ -11,7 +11,7 @@ serde_derive = "1"
 chrono = "0.4"
 globset = "0.4"
 lazy_static = "1"
-syntect = "=3.2.0"
+syntect = "5.0"
 errors = { path = "../errors" }
 utils = { path = "../utils" }
@@ -9,11 +9,13 @@ use crate::config::Config;
 lazy_static! {
     pub static ref SYNTAX_SET: SyntaxSet = {
         let ss: SyntaxSet =
-            from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump"));
+            //from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump"));
+            from_binary(include_bytes!("../sublime/newlines.packdump"));
         ss
     };
     pub static ref THEME_SET: ThemeSet =
-        from_binary(include_bytes!("../../../sublime_themes/all.themedump"));
+        //from_binary(include_bytes!("../../../sublime_themes/all.themedump"));
+        from_binary(include_bytes!("../sublime/all.themedump"));
 }

 /// Returns the highlighter and whether it was found in the extra or not
@@ -8,4 +8,4 @@ edition = "2018"
 tera = "1"
 toml = "0.5"
 image = "0.23"
-syntect = "=3.2.0"
+syntect = "5.0"
@@ -6,7 +6,7 @@ edition = "2018"
 [dependencies]
 tera = { version = "1", features = ["preserve_order"] }
-syntect = "=3.2.0"
+syntect = "5.0"
 pulldown-cmark = "0.7"
 serde = "1"
 serde_derive = "1"
@@ -205,7 +205,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
                             highlighter.highlight(&text, &SYNTAX_SET)
                         };
                         //let highlighted = &highlighter.highlight(&text, ss);
-                        let html = styled_line_to_highlighted_html(&highlighted, background);
+                        let html = styled_line_to_highlighted_html(&highlighted, background).unwrap();
                         return Event::Html(html.into());
                     }
@@ -1,9 +1,10 @@
+#[allow(unused_imports)]
 use std::str::FromStr;
 use std::collections::{HashMap, HashSet};
-use chrono::{DateTime, Utc, NaiveDateTime, TimeZone};
 use elasticlunr::{Index, Language};
 use lazy_static::lazy_static;
+#[cfg(feature = "tantivy-indexing")]
+use chrono::{DateTime, Utc, NaiveDateTime, TimeZone};
 #[allow(unused_imports)]
 use errors::{bail, Result, Error};
 use library::{Library, Section};
@@ -105,7 +106,7 @@ fn parse_language(lang: &str) -> Option<tantivy::tokenizer::Language> {
         }),
         // apparently not a code, so this is best available option
-        _ => serde_json::from_str::<tantivy::tokenizer::Language>(lang).ok()
+        _ => serde_json::from_str::<tantivy::tokenizer::Language>(&format!("{{\"language\":\"{}\"}}", lang)).ok()
     }
 }
@@ -176,6 +177,7 @@ pub fn build_tantivy_index<P: AsRef<std::path::Path>>(
     let empty_taxonomies: HashMap<String, Vec<String>> = Default::default();

     for section in library.sections_values() {
+        //eprintln!("section: {:?}, section.pages: {:?}", section, section.pages);
         // reason for macro: Section/Page are different types but have same attributes
         macro_rules! extract_content {
@@ -187,6 +189,8 @@ pub fn build_tantivy_index<P: AsRef<std::path::Path>>(
                 let cleaned_body: String = AMMONIA.clean(&$page.content).to_string();

+                //eprintln!("indexing {}", $page.permalink.as_str());
+
                 Some(IndexContent {
                     title: $page.meta.title.as_ref().map(|x| x.as_str()).unwrap_or(""),
                     description: $page.meta.description.as_ref().map(|x| x.as_str()).unwrap_or(""),
@@ -198,6 +202,7 @@ pub fn build_tantivy_index<P: AsRef<std::path::Path>>(
                     taxonomies: &empty_taxonomies,
                 })
             } else {
+                //eprintln!("not indexing {}", $page.permalink.as_str());
                 None
             }
         }}