feat: xtask themelint (#3234)
* feat: cargo xtask themelint <theme>
* fix: add docs and print json error status
* fix: refactor paths -> path
* fix: remove unused function
* fix: only report one err per scope (ui.statusline is reported if none of ui.statusline.* is recognized)
* fix: save work for later
* fix: finally decided on a design
* fix: ready for discussion
* fix: better rules
* fix: lint precision
* fix: String -> &'static str
* fix: allowlist not denylist for file type
* fix: add missing and indication of what's needed
* fix: copy pasteable errors
* fix: use Loader:read_names
* Update xtask/src/helpers.rs (Co-authored-by: Ivan Tham <pickfire@riseup.net>)
* fix: remove into and clone for str
* Update book/src/themes.md (Co-authored-by: Ivan Tham <pickfire@riseup.net>)
* fix: better lint output
* fix: cleaner logic for lint reporting
* style: use explicit imports
* Pascal support (#3542)
* fix: add difference check for statusline normal,insert,select
* fix: fg for whitespace and early exit if and one is ok
* chore: cleaning up, no idea how these got here or how this will look
* chore: revert from older commit?
* refactor: use static fn to equalize api between difference and existance
* refactor: querycheck and clippy
* refactor: clippy fixes
* fix: query-check behaves as before
* fix: error with x of y message, not total count
* fix: consistent reporting and less mutable state
* fix: selection difference

ref #3942
ref #1833

Co-authored-by: Ivan Tham <pickfire@riseup.net>
Co-authored-by: ath3 <45574139+ath3@users.noreply.github.com>
This commit is contained in:
parent 5ea7855977
commit 08b2ecc99a

9 changed files with 451 additions and 238 deletions
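For users, the net effect is a new `themelint` xtask. Going by the help text and dispatch code added below, invocation looks roughly like this (`onedark` is simply the example name used in the docs; any theme under `runtime/themes` should work):

```shell
# Lint a single theme by name (without the .toml extension)
cargo xtask themelint onedark

# Lint every theme shipped in runtime/themes (base16 themes are skipped)
cargo xtask themelint
```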
Cargo.lock (generated): 1 change

@@ -1469,5 +1469,6 @@ dependencies = [
  "helix-core",
  "helix-loader",
  "helix-term",
+ "helix-view",
  "toml",
 ]
book/src/themes.md

@@ -254,4 +254,10 @@ These scopes are used for theming the editor interface.
 | `diagnostic.warning` | Diagnostics warning (editing area) |
 | `diagnostic.error` | Diagnostics error (editing area) |

+You can check compliance to spec with
+
+```shell
+cargo xtask themelint onedark # replace onedark with <name>
+```
+
 [editor-section]: ./configuration.md#editor-section
xtask/Cargo.toml

@@ -8,5 +8,6 @@ edition = "2021"
 [dependencies]
 helix-term = { version = "0.6", path = "../helix-term" }
 helix-core = { version = "0.6", path = "../helix-core" }
+helix-view = { version = "0.6", path = "../helix-view" }
 helix-loader = { version = "0.6", path = "../helix-loader" }
 toml = "0.5"
xtask/src/docgen.rs (new file, 117 lines)

@@ -0,0 +1,117 @@
use crate::helpers;
use crate::path;
use crate::DynError;

use helix_term::commands::TYPABLE_COMMAND_LIST;
use helix_term::health::TsFeature;
use std::fs;

pub const TYPABLE_COMMANDS_MD_OUTPUT: &str = "typable-cmd.md";
pub const LANG_SUPPORT_MD_OUTPUT: &str = "lang-support.md";

fn md_table_heading(cols: &[String]) -> String {
    let mut header = String::new();
    header += &md_table_row(cols);
    header += &md_table_row(&vec!["---".to_string(); cols.len()]);
    header
}

fn md_table_row(cols: &[String]) -> String {
    format!("| {} |\n", cols.join(" | "))
}

fn md_mono(s: &str) -> String {
    format!("`{}`", s)
}

pub fn typable_commands() -> Result<String, DynError> {
    let mut md = String::new();
    md.push_str(&md_table_heading(&[
        "Name".to_owned(),
        "Description".to_owned(),
    ]));

    let cmdify = |s: &str| format!("`:{}`", s);

    for cmd in TYPABLE_COMMAND_LIST {
        let names = std::iter::once(&cmd.name)
            .chain(cmd.aliases.iter())
            .map(|a| cmdify(a))
            .collect::<Vec<_>>()
            .join(", ");

        let doc = cmd.doc.replace('\n', "<br>");

        md.push_str(&md_table_row(&[names.to_owned(), doc.to_owned()]));
    }

    Ok(md)
}

pub fn lang_features() -> Result<String, DynError> {
    let mut md = String::new();
    let ts_features = TsFeature::all();

    let mut cols = vec!["Language".to_owned()];
    cols.append(
        &mut ts_features
            .iter()
            .map(|t| t.long_title().to_string())
            .collect::<Vec<_>>(),
    );
    cols.push("Default LSP".to_owned());

    md.push_str(&md_table_heading(&cols));
    let config = helpers::lang_config();

    let mut langs = config
        .language
        .iter()
        .map(|l| l.language_id.clone())
        .collect::<Vec<_>>();
    langs.sort_unstable();

    let mut ts_features_to_langs = Vec::new();
    for &feat in ts_features {
        ts_features_to_langs.push((feat, helpers::ts_lang_support(feat)));
    }

    let mut row = Vec::new();
    for lang in langs {
        let lc = config
            .language
            .iter()
            .find(|l| l.language_id == lang)
            .unwrap(); // lang comes from config
        row.push(lc.language_id.clone());

        for (_feat, support_list) in &ts_features_to_langs {
            row.push(
                if support_list.contains(&lang) {
                    "✓"
                } else {
                    ""
                }
                .to_owned(),
            );
        }
        row.push(
            lc.language_server
                .as_ref()
                .map(|s| s.command.clone())
                .map(|c| md_mono(&c))
                .unwrap_or_default(),
        );

        md.push_str(&md_table_row(&row));
        row.clear();
    }

    Ok(md)
}

pub fn write(filename: &str, data: &str) {
    let error = format!("Could not write to {}", filename);
    let path = path::book_gen().join(filename);
    fs::write(path, data).expect(&error);
}
xtask/src/helpers.rs (new file, 44 lines)

@@ -0,0 +1,44 @@
use std::path::{Path, PathBuf};

use crate::path;
use helix_core::syntax::Configuration as LangConfig;
use helix_term::health::TsFeature;

/// Get the list of languages that support a particular tree-sitter
/// based feature.
pub fn ts_lang_support(feat: TsFeature) -> Vec<String> {
    let queries_dir = path::ts_queries();

    find_files(&queries_dir, feat.runtime_filename())
        .iter()
        .map(|f| {
            // .../helix/runtime/queries/python/highlights.scm
            let tail = f.strip_prefix(&queries_dir).unwrap(); // python/highlights.scm
            let lang = tail.components().next().unwrap(); // python
            lang.as_os_str().to_string_lossy().to_string()
        })
        .collect()
}

// naive implementation, but suffices for our needs
pub fn find_files(dir: &Path, filename: &str) -> Vec<PathBuf> {
    std::fs::read_dir(dir)
        .unwrap()
        .filter_map(|entry| {
            let path = entry.ok()?.path();
            if path.is_dir() {
                Some(find_files(&path, filename))
            } else if path.file_name()?.to_string_lossy() == filename {
                Some(vec![path])
            } else {
                None
            }
        })
        .flatten()
        .collect()
}

pub fn lang_config() -> LangConfig {
    let bytes = std::fs::read(path::lang_config()).unwrap();
    toml::from_slice(&bytes).unwrap()
}
xtask/src/main.rs

@@ -1,255 +1,35 @@
+mod docgen;
+mod helpers;
+mod path;
+mod querycheck;
+mod themelint;
+
 use std::{env, error::Error};

 type DynError = Box<dyn Error>;

-pub mod helpers {
-    [... ts_lang_support, find_files and lang_config moved to xtask/src/helpers.rs; langs_with_ts_queries below is removed entirely ...]
-
-    /// Get the list of languages that have any form of tree-sitter
-    /// queries defined in the runtime directory.
-    pub fn langs_with_ts_queries() -> Vec<String> {
-        std::fs::read_dir(path::ts_queries())
-            .unwrap()
-            .filter_map(|entry| {
-                let entry = entry.ok()?;
-                entry
-                    .file_type()
-                    .ok()?
-                    .is_dir()
-                    .then(|| entry.file_name().to_string_lossy().to_string())
-            })
-            .collect()
-    }
-}
-
-pub mod md_gen {
-    [... body moved to xtask/src/docgen.rs ...]
-}
-
-pub mod path {
-    [... body moved to xtask/src/path.rs, which additionally gains themes() ...]
-}
-
 pub mod tasks {
-    use crate::md_gen;
+    use crate::docgen::{lang_features, typable_commands, write};
+    use crate::docgen::{LANG_SUPPORT_MD_OUTPUT, TYPABLE_COMMANDS_MD_OUTPUT};
+    use crate::querycheck::query_check;
+    use crate::themelint::{lint, lint_all};
     use crate::DynError;

     pub fn docgen() -> Result<(), DynError> {
-        use md_gen::*;
         write(TYPABLE_COMMANDS_MD_OUTPUT, &typable_commands()?);
         write(LANG_SUPPORT_MD_OUTPUT, &lang_features()?);
         Ok(())
     }

-    pub fn query_check() -> Result<(), String> {
-        use crate::helpers::lang_config;
-        use helix_core::{syntax::read_query, tree_sitter::Query};
-        use helix_loader::grammar::get_language;
-
-        let query_files = [
-            "highlights.scm",
-            "locals.scm",
-            "injections.scm",
-            "textobjects.scm",
-            "indents.scm",
-        ];
-
-        for language in lang_config().language {
-            let language_name = language.language_id.to_ascii_lowercase();
-            let grammar_name = language.grammar.unwrap_or(language.language_id);
-            for query_file in query_files {
-                let language = get_language(&grammar_name);
-                let query_text = read_query(&language_name, query_file);
-                if !query_text.is_empty() && language.is_ok() {
-                    if let Err(reason) = Query::new(language.unwrap(), &query_text) {
-                        return Err(format!(
-                            "Failed to parse {} queries for {}: {}",
-                            query_file, language_name, reason
-                        ));
-                    }
-                }
-            }
-        }
-
-        println!("Query check succeeded");
-        Ok(())
+    pub fn themelint(file: Option<String>) -> Result<(), DynError> {
+        match file {
+            Some(file) => lint(file),
+            None => lint_all(),
+        }
+    }
+
+    pub fn querycheck() -> Result<(), DynError> {
+        query_check()
     }

     pub fn print_help() {

@@ -259,6 +39,7 @@ Usage: Run with `cargo xtask <task>`, eg. `cargo xtask docgen`.

     Tasks:
         docgen: Generate files to be included in the mdbook output.
+        themelint <theme>: Report errors for <theme>, or all themes if no theme is specified.
         query-check: Check that tree-sitter queries are valid.
 "
     );

@@ -271,7 +52,8 @@ fn main() -> Result<(), DynError> {
         None => tasks::print_help(),
         Some(t) => match t.as_str() {
             "docgen" => tasks::docgen()?,
-            "query-check" => tasks::query_check()?,
+            "themelint" => tasks::themelint(env::args().nth(2))?,
+            "query-check" => tasks::querycheck()?,
             invalid => return Err(format!("Invalid task name: {}", invalid).into()),
         },
     };
xtask/src/path.rs (new file, 24 lines)

@@ -0,0 +1,24 @@
use std::path::{Path, PathBuf};

pub fn project_root() -> PathBuf {
    Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .to_path_buf()
}

pub fn book_gen() -> PathBuf {
    project_root().join("book/src/generated/")
}

pub fn ts_queries() -> PathBuf {
    project_root().join("runtime/queries")
}

pub fn lang_config() -> PathBuf {
    project_root().join("languages.toml")
}

pub fn themes() -> PathBuf {
    project_root().join("runtime/themes")
}
xtask/src/querycheck.rs (new file, 39 lines)

@@ -0,0 +1,39 @@
use crate::DynError;

pub fn query_check() -> Result<(), DynError> {
    use crate::helpers::lang_config;
    use helix_core::{syntax::read_query, tree_sitter::Query};
    use helix_loader::grammar::get_language;

    let query_files = [
        "highlights.scm",
        "locals.scm",
        "injections.scm",
        "textobjects.scm",
        "indents.scm",
    ];

    for language in lang_config().language {
        let language_name = language.language_id.to_ascii_lowercase();
        let grammar_name = language.grammar.unwrap_or(language.language_id);
        for query_file in query_files {
            let language = get_language(&grammar_name);
            let query_text = read_query(&language_name, query_file);
            if let Ok(lang) = language {
                if !query_text.is_empty() {
                    if let Err(reason) = Query::new(lang, &query_text) {
                        return Err(format!(
                            "Failed to parse {} queries for {}: {}",
                            query_file, language_name, reason
                        )
                        .into());
                    }
                }
            }
        }
    }

    println!("Query check succeeded");

    Ok(())
}
xtask/src/themelint.rs (new file, 199 lines)

@@ -0,0 +1,199 @@
use crate::path;
use crate::DynError;
use helix_view::theme::Loader;
use helix_view::theme::Modifier;
use helix_view::Theme;

struct Rule {
    fg: Option<&'static str>,
    bg: Option<&'static str>,
    check_both: bool,
}

enum Require {
    Existence(Rule),
    Difference(&'static str, &'static str),
}

// Placed in an fn here, so it's the first thing you see
fn get_rules() -> Vec<Require> {
    vec![
        // Check for ui.selection, which is required
        Require::Existence(Rule::has_either("ui.selection")),
        Require::Existence(Rule::has_either("ui.selection.primary")),
        Require::Difference("ui.selection", "ui.selection.primary"),
        // Check for planned readable text
        Require::Existence(Rule::has_fg("ui.text")),
        Require::Existence(Rule::has_bg("ui.background")),
        // Check for complete editor.statusline bare minimum
        Require::Existence(Rule::has_both("ui.statusline")),
        Require::Existence(Rule::has_both("ui.statusline.inactive")),
        // Check for editor.color-modes
        Require::Existence(Rule::has_either("ui.statusline.normal")),
        Require::Existence(Rule::has_either("ui.statusline.insert")),
        Require::Existence(Rule::has_either("ui.statusline.select")),
        Require::Difference("ui.statusline.normal", "ui.statusline.insert"),
        Require::Difference("ui.statusline.normal", "ui.statusline.select"),
        // Check for editor.cursorline
        Require::Existence(Rule::has_bg("ui.cursorline.primary")),
        // Check for editor.whitespace
        Require::Existence(Rule::has_fg("ui.virtual.whitespace")),
        // Check fir rulers
        Require::Existence(Rule::has_either("ui.virtual.indent-guide")),
        // Check for editor.rulers
        Require::Existence(Rule::has_either("ui.virtual.ruler")),
        // Check for menus and prompts
        Require::Existence(Rule::has_both("ui.menu")),
        Require::Existence(Rule::has_both("ui.help")),
        Require::Existence(Rule::has_bg("ui.popup")),
        Require::Existence(Rule::has_either("ui.window")),
        // Check for visible cursor
        Require::Existence(Rule::has_bg("ui.cursor.primary")),
        Require::Existence(Rule::has_either("ui.cursor.match")),
    ]
}

impl Rule {
    fn has_bg(bg: &'static str) -> Rule {
        Rule {
            fg: None,
            bg: Some(bg),
            check_both: true,
        }
    }
    fn has_fg(fg: &'static str) -> Rule {
        Rule {
            fg: Some(fg),
            bg: None,
            check_both: true,
        }
    }
    fn has_either(item: &'static str) -> Rule {
        Rule {
            fg: Some(item),
            bg: Some(item),
            check_both: false,
        }
    }
    fn has_both(item: &'static str) -> Rule {
        Rule {
            fg: Some(item),
            bg: Some(item),
            check_both: true,
        }
    }
    fn found_fg(&self, theme: &Theme) -> bool {
        if let Some(fg) = &self.fg {
            if theme.get(fg).fg.is_none() && theme.get(fg).add_modifier == Modifier::empty() {
                return false;
            }
        }
        true
    }
    fn found_bg(&self, theme: &Theme) -> bool {
        if let Some(bg) = &self.bg {
            if theme.get(bg).bg.is_none() && theme.get(bg).add_modifier == Modifier::empty() {
                return false;
            }
        }
        true
    }
    fn rule_name(&self) -> &'static str {
        if self.fg.is_some() {
            self.fg.unwrap()
        } else if self.bg.is_some() {
            self.bg.unwrap()
        } else {
            "LINTER_ERROR_NO_RULE"
        }
    }

    fn check_difference(
        theme: &Theme,
        a: &'static str,
        b: &'static str,
        messages: &mut Vec<String>,
    ) {
        let theme_a = theme.get(a);
        let theme_b = theme.get(b);
        if theme_a == theme_b {
            messages.push(format!("$THEME: `{}` and `{}` cannot be equal", a, b));
        }
    }

    fn check_existence(rule: &Rule, theme: &Theme, messages: &mut Vec<String>) {
        let found_fg = rule.found_fg(theme);
        let found_bg = rule.found_bg(theme);

        if !rule.check_both && (found_fg || found_bg) {
            return;
        }
        if !found_fg || !found_bg {
            let mut missing = vec![];
            if !found_fg {
                missing.push("`fg`");
            }
            if !found_bg {
                missing.push("`bg`");
            }
            let entry = if !rule.check_both && !found_fg && !found_bg {
                missing.join(" or ")
            } else {
                missing.join(" and ")
            };
            messages.push(format!(
                "$THEME: missing {} for `{}`",
                entry,
                rule.rule_name()
            ))
        }
    }
}

pub fn lint(file: String) -> Result<(), DynError> {
    if file.contains("base16") {
        println!("Skipping base16: {}", file);
        return Ok(());
    }
    let path = path::themes().join(file.clone() + ".toml");
    let theme = std::fs::read(&path).unwrap();
    let theme: Theme = toml::from_slice(&theme).expect("Failed to parse theme");

    let mut messages: Vec<String> = vec![];
    get_rules().iter().for_each(|lint| match lint {
        Require::Existence(rule) => Rule::check_existence(rule, &theme, &mut messages),
        Require::Difference(a, b) => Rule::check_difference(&theme, a, b, &mut messages),
    });

    if !messages.is_empty() {
        messages.iter().for_each(|m| {
            let theme = file.clone();
            let message = m.replace("$THEME", theme.as_str());
            println!("{}", message);
        });
        Err(format!("{} has issues", file.clone().as_str()).into())
    } else {
        Ok(())
    }
}

pub fn lint_all() -> Result<(), DynError> {
    let files = Loader::read_names(path::themes().as_path());
    let files_count = files.len();
    let ok_files_count = files
        .into_iter()
        .filter_map(|path| lint(path.replace(".toml", "")).ok())
        .collect::<Vec<()>>()
        .len();

    if files_count != ok_files_count {
        Err(format!(
            "{} of {} themes had issues",
            files_count - ok_files_count,
            files_count
        )
        .into())
    } else {
        Ok(())
    }
}
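For a sense of what the linter reports, here is a sketch of a failing run. The output lines are inferred from the `$THEME: ...` format strings above; `mytheme` is a made-up theme name, and which messages actually appear depends on which rules fail for a given theme:

```shell
$ cargo xtask themelint mytheme
mytheme: missing `fg` for `ui.virtual.whitespace`
mytheme: `ui.selection` and `ui.selection.primary` cannot be equal
Error: "mytheme has issues"
```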