
Apply clippy::{doc_markdown,missing-panics-doc}

oxalica 2023-03-10 15:04:35 +08:00
parent 1eab5d2ea5
commit 4a9f128200
12 changed files with 56 additions and 36 deletions
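For readers unfamiliar with these two lints, here is a minimal, self-contained sketch (the function is hypothetical, not from this repository) of what they enforce: `clippy::doc_markdown` wants code-like identifiers in doc comments wrapped in backticks and bare URLs wrapped in angle brackets, while `clippy::missing-panics-doc` wants every public function that can panic to document that under a `# Panics` heading.

```rust
/// Loads a `Config`-like value from `path`.
///
/// `doc_markdown` is satisfied here because `Config` and `path` are in
/// backticks and the link below is wrapped in angle brackets:
/// <https://rust-lang.github.io/rust-clippy/master/index.html#doc_markdown>
///
/// # Panics
/// Panics if `path` is empty. Omitting this section on a public function
/// containing `assert!` would trip `missing_panics_doc`.
pub fn load_config(path: &str) -> String {
    assert!(!path.is_empty(), "path must not be empty");
    format!("loaded: {path}")
}

fn main() {
    println!("{}", load_config("flake.nix"));
}
```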

View File

@@ -13,6 +13,7 @@ pub enum BuiltinKind {
     Attrset,
 }
 
+#[allow(clippy::all)]
 pub static ALL_BUILTINS: phf::Map<&'static str, Builtin> =
     include!(concat!(env!("OUT_DIR"), "/generated.expr"));
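The interesting detail above is that `#[allow(...)]` sits on the item whose initializer is spliced in by `include!`, so the generated expression inherits the suppression. A hedged, self-contained illustration of that scoping (the slice below is hand-written where the real one is machine-generated):

```rust
// `#[allow]` on an item covers its whole body, including expressions
// pasted in by `include!` or expanded from macros.
#[allow(clippy::all)]
pub static LOOKUP: &[(&str, u32)] = &[
    ("true", 0),
    ("false", 1), // imagine this slice coming from a build script
];

fn main() {
    assert_eq!(LOOKUP[0].0, "true");
}
```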

View File

@@ -222,11 +222,6 @@ impl FileRange {
     pub fn empty(pos: FilePos) -> Self {
         Self::new(pos.file_id, TextRange::empty(pos.pos))
     }
-
-    pub fn span(start: FilePos, end: FilePos) -> Self {
-        assert_eq!(start.file_id, end.file_id);
-        Self::new(start.file_id, TextRange::new(start.pos, end.pos))
-    }
 }
 
 #[salsa::query_group(SourceDatabaseStorage)]

View File

@@ -411,7 +411,7 @@ impl MergingSet {
     }
 
     /// Push a dynamic Attr. This is also used for error recovery,
-    /// so InvalidDynamic is not checked here.
+    /// so `InvalidDynamic` is not checked here.
     fn push_dynamic(
        &mut self,
        ctx: &mut LowerCtx,

View File

@@ -11,7 +11,7 @@ use nix_interop::{DEFAULT_IMPORT_FILE, FLAKE_FILE};
 use std::collections::HashMap;
 use std::{mem, ops};
 use syntax::ast::AstNode;
-use syntax::{NixLanguage, SyntaxNode, TextSize};
+use syntax::{NixLanguage, SyntaxNode, TextRange, TextSize};
 
 pub const MARKER_INDICATOR: char = '$';
 
@@ -201,7 +201,13 @@ impl Fixture {
     pub fn unwrap_single_range_marker(&self) -> FileRange {
         match *self.markers() {
             [fpos] => FileRange::empty(fpos),
-            [start, end] => FileRange::span(start, end),
+            [start, end] => {
+                assert_eq!(
+                    start.file_id, end.file_id,
+                    "Start and end markers must be in the same file"
+                );
+                FileRange::new(start.file_id, TextRange::new(start.pos, end.pos))
+            }
             _ => panic!("Must have either 1 or 2 markers"),
         }
     }
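The inlined replacement keeps the behavior of the deleted `FileRange::span` at its only call site. A hedged sketch of the marker logic, decoupled from the fixture types (plain tuples stand in for `FilePos` and `FileRange`):

```rust
// One marker yields an empty range at that offset; two markers yield the
// span between them, and must point into the same file.
fn single_range_marker(markers: &[(u32, u32)]) -> (u32, (u32, u32)) {
    match *markers {
        [(file, pos)] => (file, (pos, pos)),
        [(f1, start), (f2, end)] => {
            assert_eq!(f1, f2, "Start and end markers must be in the same file");
            (f1, (start, end))
        }
        _ => panic!("Must have either 1 or 2 markers"),
    }
}

fn main() {
    assert_eq!(single_range_marker(&[(0, 5)]), (0, (5, 5)));
    assert_eq!(single_range_marker(&[(0, 2), (0, 7)]), (0, (2, 7)));
}
```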

View File

@@ -71,7 +71,7 @@ fn merge_attrset(lhs: &Ty, rhs: &Ty) -> Ty {
     })
 }
 
-/// https://nixos.wiki/wiki/Flakes
+/// <https://nixos.wiki/wiki/Flakes>
 pub fn flake(inputs: &[&str]) -> Ty {
     // https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-flake.html#flake-references
     let input_ty = merge_attrset(

View File

@@ -7,7 +7,7 @@ mod semantic_tokens;
 mod server;
 mod vfs;
 
-use anyhow::Result;
+use anyhow::{Context, Result};
 use ide::VfsPath;
 use lsp_server::{Connection, ErrorCode};
 use lsp_types::{InitializeParams, Url};
@@ -68,8 +68,9 @@ impl UrlExt for Url {
 }
 
 pub fn main_loop(conn: Connection) -> Result<()> {
-    let init_params =
-        conn.initialize(serde_json::to_value(capabilities::server_capabilities()).unwrap())?;
+    let init_params = conn.initialize(
+        serde_json::to_value(capabilities::server_capabilities()).context("Invalid init_params")?,
+    )?;
     tracing::info!("Init params: {}", init_params);
     let init_params = serde_json::from_value::<InitializeParams>(init_params)?;
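Replacing `.unwrap()` with `anyhow::Context` turns a would-be panic into an error that flows through `?` with a message attached. A minimal sketch of the same pattern (the function is hypothetical):

```rust
use anyhow::{Context, Result};

fn parse_port(s: &str) -> Result<u16> {
    // On failure, `context` wraps the underlying error with the given
    // message instead of aborting the process the way `.unwrap()` would.
    s.parse::<u16>().context("Invalid port number")
}

fn main() {
    assert_eq!(parse_port("8080").unwrap(), 8080);
    assert!(parse_port("not-a-port").is_err());
}
```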

View File

@@ -1,6 +1,6 @@
 use lsp_types::request::Request;
 
-/// https://github.com/microsoft/language-server-protocol/issues/1002
+/// <https://github.com/microsoft/language-server-protocol/issues/1002>
 pub enum ParentModule {}
 
 impl Request for ParentModule {
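`ParentModule` follows the usual `lsp_types` pattern for custom methods: an uninhabited enum implementing `Request` with an associated method string and param/result types. A hedged sketch of the shape (the method name and payload here are illustrative, not necessarily what nil registers):

```rust
use lsp_types::request::Request;
use lsp_types::{TextDocumentPositionParams, Url};

pub enum ParentModuleExample {}

impl Request for ParentModuleExample {
    // Params and Result must be (de)serializable; the server dispatches
    // on METHOD when the client sends the request.
    type Params = TextDocumentPositionParams;
    type Result = Vec<Url>;
    const METHOD: &'static str = "experimental/parentModule";
}

fn main() {
    println!("{}", ParentModuleExample::METHOD);
}
```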

View File

@@ -3,7 +3,7 @@
 //! We want a custom `nix flake archive` without dumping the current flake
 //! which may be very costly for large repositories like nixpkgs.
 //!
-//! https://github.com/NixOS/nix/blob/2.13.1/src/nix/flake.md#lock-files
+//! <https://github.com/NixOS/nix/blob/2.13.1/src/nix/flake.md#lock-files>
 use std::collections::{HashMap, HashSet};
 use std::path::Path;

View File

@@ -132,7 +132,13 @@ regex_dfa! {
 pub type LexTokens = Vec<(SyntaxKind, TextRange)>;
 
 /// Tokenize the source of a Nix file.
+///
+/// # Panics
+/// Panic if the source is longer than `u32::MAX`.
 pub fn lex(src: &[u8]) -> LexTokens {
+    assert!(u32::try_from(src.len()).is_ok());
     let total_len = TextSize::try_from(src.len()).expect("Length overflow");
 
     let default_ctx = (&*DEFAULT_TOKEN_DFA, DEFAULT_TOKEN_MAP);
 
@@ -152,6 +158,7 @@ pub fn lex(src: &[u8]) -> LexTokens {
             // The length of src is checked before.
             Some(m) => (
                 map[m.pattern().as_usize()],
+                // Offset <= u32, already checked.
                 TextSize::from(m.offset() as u32),
             ),
             None if ptr::eq(dfa, path_ctx.0) => {
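`TextSize` (from the `text-size` crate) is a `u32`-backed offset, which is why both the new up-front assert and the later `as u32` cast are sound only for sources under `u32::MAX` bytes. A self-contained sketch of the invariant:

```rust
use text_size::TextSize;

fn main() {
    let src = "let x = 1; in x";
    // The up-front check the commit adds: fail loudly on oversized input
    // rather than truncating offsets later.
    assert!(u32::try_from(src.len()).is_ok());
    let total_len = TextSize::try_from(src.len()).expect("Length overflow");
    assert_eq!(u32::from(total_len), src.len() as u32);
}
```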

View File

@@ -18,7 +18,7 @@ impl Parse {
     }
 
     pub fn root(&self) -> SourceFile {
-        SourceFile::cast(self.syntax_node()).unwrap()
+        SourceFile::cast(self.syntax_node()).expect("The entry node is SourceFile")
     }
 
     pub fn syntax_node(&self) -> SyntaxNode {
@@ -30,8 +30,13 @@ impl Parse {
     }
 }
 
 /// Parse the source of a Nix file.
+///
+/// # Panics
+/// Panic if the source is longer than `u32::MAX`.
 pub fn parse_file(src: &str) -> Parse {
-    assert!(src.len() < u32::MAX as usize);
+    assert!(u32::try_from(src.len()).is_ok());
     let mut tokens = lexer::lex(src.as_bytes());
     tokens.reverse();
     Parser {
@@ -115,7 +120,7 @@ impl<'i> Parser<'i> {
         self.builder.token(kind.into(), &self.src[range]);
     }
 
-    /// Consume the next token and wrap it in an ERROR node.
+    /// Consume the next token and wrap it in an `ERROR` node.
     fn bump_error(&mut self) {
         self.start_node(ERROR);
         self.bump();
@@ -129,7 +134,7 @@ impl<'i> Parser<'i> {
         self.tokens.last().copied()
     }
 
-    /// Like `peek`, but only returns SyntaxKind.
+    /// Like `peek`, but only returns `SyntaxKind`.
     fn peek(&mut self) -> Option<SyntaxKind> {
         self.peek_full().map(|(k, _)| k)
     }
@@ -266,7 +271,7 @@ impl<'i> Parser<'i> {
                 self.finish_node();
             }
             Some(T!['{']) => {
-                // Recognise patterns of LAMBDA starting. Otherwise, it's an ATTR_SET.
+                // Recognise patterns of `LAMBDA` starting. Otherwise, it's an `ATTR_SET`.
                 // - '{ ...'
                 // - '{ } :'
                 // - '{ } @'
@@ -316,7 +321,7 @@ impl<'i> Parser<'i> {
                 }
             }
             Some(IDENT) => {
-                // Recognise patterns of LAMBDA starting. Otherwise, it's an REF.
+                // Recognise patterns of `LAMBDA` starting. Otherwise, it's a `REF`.
                 // - 'x :'
                 // - 'x @'
                 let is_lambda = matches!(self.peek_iter_non_ws().nth(1), Some(T![:] | T![@]));
@@ -392,7 +397,7 @@ impl<'i> Parser<'i> {
                 break;
             }
 
-            // Currently we have only HAS_ATTR as a postfix operator.
+            // Currently we have only `HAS_ATTR` as a postfix operator.
             assert_eq!(tok, T![?]);
             self.start_node_at(cp, HAS_ATTR);
             self.bump(); // `?`
@@ -442,7 +447,7 @@ impl<'i> Parser<'i> {
             self.finish_node();
 
         // Yes, this is weird, but Nix parse `or` immediately after a non-select atom expression,
-        // and construct a Apply node, with higher priority than left-associative Apply.
+        // and construct a `Apply` node, with higher priority than left-associative Apply.
         // `a b or c` => `(a (b or)) c`
         } else if self.peek_non_ws() == Some(T![or]) {
             self.start_node_at(cp, APPLY);
@@ -456,7 +461,7 @@ impl<'i> Parser<'i> {
     /// Atom level expression (highest priority).
     /// Maybe consume nothing.
     fn expr_atom_opt(&mut self) {
-        // This must matches SyntaxKind::can_start_atom_expr.
+        // This must matches `SyntaxKind::can_start_atom_expr`.
         match self.peek_non_ws() {
             Some(IDENT) => {
                 self.start_node(REF);
@@ -585,7 +590,7 @@ impl<'i> Parser<'i> {
         }
     }
 
-    /// Always consume some tokens and make a PAT node.
+    /// Always consume some tokens and make a `PAT` node.
     fn pat(&mut self) {
         assert_eq!(self.peek(), Some(T!['{']));
         self.start_node(PAT);
@@ -640,7 +645,7 @@ impl<'i> Parser<'i> {
         self.finish_node();
     }
 
-    /// Maybe consume tokens and maybe make many INHERIT or ATTR_PATH_VALUE nodes,
+    /// Maybe consume tokens and maybe make many `INHERIT` or `ATTR_PATH_VALUE` nodes,
     /// and must consume the guard token or reaching EOF.
     fn bindings_until(&mut self, guard: SyntaxKind) {
         loop {
@@ -721,7 +726,7 @@ impl<'i> Parser<'i> {
         }
     }
 
-    /// Maybe consume tokens and always make a ATTR_PATH node.
+    /// Maybe consume tokens and always make a `ATTR_PATH` node.
     fn attrpath_opt(&mut self) {
         self.start_node(ATTR_PATH);
         self.attr_opt(true);
@@ -735,7 +740,7 @@ impl<'i> Parser<'i> {
     /// Maybe consume tokens and always make a {IDENT,DYNAMIC,STRING} node.
     /// If `force_name` is true, an empty NAME node would be created when the next token is unexpected.
     fn attr_opt(&mut self, force_name: bool) {
-        // This must matches SyntaxKind::can_start_attr.
+        // This must matches `SyntaxKind::can_start_attr`.
         match self.peek_non_ws() {
             Some(IDENT | T![or]) => {
                 self.start_node(NAME);
@@ -754,7 +759,7 @@ impl<'i> Parser<'i> {
         }
     }
 
-    /// Must consume tokens and always make a DYNAMIC node.
+    /// Must consume tokens and always make a `DYNAMIC` node.
     fn dynamic(&mut self) {
         assert_eq!(self.peek(), Some(T!["${"]));
         self.start_node(DYNAMIC);
@@ -764,7 +769,7 @@ impl<'i> Parser<'i> {
         self.finish_node();
     }
 
-    /// Must consume tokens and always make a STRING or INDENT_STRING node.
+    /// Must consume tokens and always make a `STRING` or `INDENT_STRING` node.
     fn string(&mut self, node: SyntaxKind) {
         assert!(matches!(self.peek(), Some(T!['"'] | T!["''"])));
         self.start_node(node);
@@ -797,12 +802,12 @@
 }
 
 impl SyntaxKind {
-    // This must matches Parser::attr_opt.
+    // This must matches `Parser::attr_opt`.
     fn can_start_attr(self) -> bool {
         matches!(self, T!["${"] | T!['"'] | T![or] | IDENT)
     }
 
-    // This must matches Parser::expr_atom_opt.
+    // This must matches `Parser::expr_atom_opt`.
     fn can_start_atom_expr(self) -> bool {
         matches!(
             self,
@@ -825,7 +830,7 @@ impl SyntaxKind {
     /// Check if a token can start an expression. Only used for error recovery.
     fn can_start_expr(self) -> bool {
-        // Should match Parser::expr_function_opt
+        // Should match `Parser::expr_function_opt`.
         // Checked in can_start_atom_expr: T![let] | T![rec] | T!['{'] | IDENT
         self.can_start_atom_expr()
             || self.prefix_bp().is_some()

View File

@@ -33,7 +33,9 @@ pub fn escape_literal_attr(name: &str) -> Cow<'_, str> {
 /// Unescape a single string escape sequence.
 ///
-/// The input should be from `StringPart::Escape` produced by the parser.
+/// # Panics
+/// The input must be from `StringPart::Escape` produced by the parser.
+/// It will panic for unrecognized escape.
 pub fn unescape_string_escape(escape: &str) -> &str {
     match escape {
         "''$" => "$",
@@ -90,12 +92,12 @@ pub enum UnescapedStringPart<'a> {
     Dynamic(ast::Dynamic),
 }
 
-/// Calculate the minimal indentation of an IndentString.
+/// Calculate the minimal indentation of an `IndentString`.
 /// Or returns `usize::MAX` if all lines are empty.
 ///
 /// See:
-/// - https://github.com/NixOS/nix/blob/2.11.0/src/libexpr/parser.y#L195
-/// - https://github.com/NixOS/nix/blob/2.11.0/src/libexpr/lexer.l#L204
+/// - <https://github.com/NixOS/nix/blob/2.11.0/src/libexpr/lexer.l#L204>
+/// - <https://github.com/NixOS/nix/blob/2.11.0/src/libexpr/parser.y#L195>
 pub fn common_indent_of(n: &ast::IndentString) -> usize {
     let mut ret = usize::MAX;
     let mut counter = Some(0usize);
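`common_indent_of` walks the syntax tree, but the underlying rule is simple. A hedged, simplified sketch over plain text (real indented strings also involve interpolations and whitespace subtleties this ignores): the result is the minimum leading-space count over non-blank lines, or `usize::MAX` when every line is blank.

```rust
fn common_indent(text: &str) -> usize {
    text.lines()
        .filter(|line| !line.trim().is_empty()) // all-whitespace lines don't count
        .map(|line| line.len() - line.trim_start_matches(' ').len())
        .min()
        .unwrap_or(usize::MAX) // no non-blank lines at all
}

fn main() {
    assert_eq!(common_indent("  foo\n    bar"), 2);
    assert_eq!(common_indent("   \n\n"), usize::MAX);
}
```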

View File

@@ -51,7 +51,10 @@
       clippyFlags = lib.concatStringsSep " " [
         "-D" "warnings"
         "-D" "clippy::dbg_macro"
+        "-D" "clippy::doc_markdown"
+        "-D" "clippy::missing-panics-doc"
         "-D" "clippy::semicolon_if_nothing_returned"
         "-D" "clippy::todo"
       ];
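For local reproduction, these flags presumably correspond to an invocation along the lines of `cargo clippy -- -D warnings -D clippy::dbg_macro -D clippy::doc_markdown -D clippy::missing-panics-doc -D clippy::semicolon_if_nothing_returned -D clippy::todo`, so the CI check and a developer's local run deny the same lint set.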