Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
/target
.idea
.cursor
dhat-heap.json
.claude

# MCP, 用于为AI提供lsp上下文
.serena
# AI
.cursor
.claude
.codex
openspec
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ pub struct FileGenericIndex {
generic_params: Vec<TagGenericParams>,
root_node_ids: Vec<GenericEffectId>,
effect_nodes: Vec<GenericEffectRangeNode>,
pending_type_params: Vec<GenericParam>,
}

impl FileGenericIndex {
Expand All @@ -17,6 +18,7 @@ impl FileGenericIndex {
generic_params: Vec::new(),
root_node_ids: Vec::new(),
effect_nodes: Vec::new(),
pending_type_params: Vec::new(),
}
}

Expand Down Expand Up @@ -76,6 +78,14 @@ impl FileGenericIndex {
}
}

/// Queues a generic parameter that has just been declared so it is visible
/// to lookups that run before the whole declaration list is registered
/// (e.g. `T` referenced by a later `K extends keyof T` in the same list).
pub fn append_pending_type_param(&mut self, param: GenericParam) {
self.pending_type_params.push(param);
}

/// Drops all queued pending type parameters; called both before and after
/// a generic declaration list is processed so entries from one declaration
/// never leak into lookups for the next.
pub fn clear_pending_type_params(&mut self) {
self.pending_type_params.clear();
}

fn get_start(&self, ranges: &[TextRange]) -> Option<usize> {
let params_ids = self.find_generic_params(ranges.first()?.start())?;
let mut start = 0;
Expand Down Expand Up @@ -134,22 +144,33 @@ impl FileGenericIndex {
position: TextSize,
name: &str,
) -> Option<(GenericTplId, Option<LuaType>)> {
let params_ids = self.find_generic_params(position)?;

for params_id in params_ids.iter().rev() {
if let Some(params) = self.generic_params.get(*params_id)
&& let Some((id, param)) = params.params.get(name)
{
let tpl_id = if params.is_func {
GenericTplId::Func(*id as u32)
} else {
GenericTplId::Type(*id as u32)
};
return Some((tpl_id, param.type_constraint.clone()));
if let Some(params_ids) = self.find_generic_params(position) {
for params_id in params_ids.iter().rev() {
if let Some(params) = self.generic_params.get(*params_id)
&& let Some((id, param)) = params.params.get(name)
{
let tpl_id = if params.is_func {
GenericTplId::Func(*id as u32)
} else {
GenericTplId::Type(*id as u32)
};
return Some((tpl_id, param.type_constraint.clone()));
}
}
}

None
// 搜索前置类型参数, 例如 ---@alias Pick<T, K extends keyof T>
self.pending_type_params
.iter()
.enumerate()
.rev()
.find(|(_, param)| param.name == name)
.map(|(idx, param)| {
(
GenericTplId::Type(idx as u32),
param.type_constraint.clone(),
)
})
}

fn find_generic_params(&self, position: TextSize) -> Option<Vec<usize>> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -200,19 +200,27 @@ fn get_generic_params(
analyzer: &mut DocAnalyzer,
params: LuaDocGenericDeclList,
) -> Vec<GenericParam> {
analyzer.generic_index.clear_pending_type_params();
let mut params_result = Vec::new();
for param in params.get_generic_decl() {
let name = if let Some(param) = param.get_name_token() {
SmolStr::new(param.get_name_text())
} else {
continue;
};
let type_ref = param

let type_constraint = param
.get_type()
.map(|type_ref| infer_type(analyzer, type_ref));

params_result.push(GenericParam::new(name, type_ref, None));
let param = GenericParam::new(name, type_constraint, None);
analyzer
.generic_index
.append_pending_type_param(param.clone());

params_result.push(param);
}
analyzer.generic_index.clear_pending_type_params();

params_result
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -228,4 +228,30 @@ mod test {
// so that variadic spreading continues to work as expected
assert_eq!(ws.humanize_type(v_ty), "string");
}

#[test]
fn test_issue_925() {
    // Regression test for issue 925: an `infer` placeholder inside a
    // conditional type in a generic return annotation must resolve against
    // the concrete type at the call site instead of staying unresolved.
    let mut workspace = VirtualWorkspace::new();

    // Generic class whose method unwraps `Test<infer U>` via a conditional.
    workspace.def(
        r#"
---@class Test<T>
local M = {}

---@generic T
---@param value T
---@return Test<T extends Test<infer U> and U or T>
function M.with_dot(value) end
"#,
    );

    // Invoke the method through a value typed as `Test<integer>`.
    workspace.def(
        r#"
---@type Test<integer>
local a
A = a.with_dot(1)
"#,
    );

    let inferred = workspace.expr_ty("A");
    assert_eq!(workspace.humanize_type(inferred), "Test<integer>");
}
}
21 changes: 21 additions & 0 deletions crates/emmylua_code_analysis/src/compilation/test/generic_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -847,4 +847,25 @@ mod test {
"#,
));
}

#[test]
fn test_issue_986() {
    // Regression test for issue 986: a generic parameter constrained with
    // `keyof` (`K extends keyof Foo`) should let `Foo[K]` resolve to the
    // field type selected by the call-site key.
    let mut workspace = VirtualWorkspace::new();

    workspace.def(
        r#"
---@class Foo
---@field cost number

---@generic K extends keyof Foo
---@param key K
---@return Foo[K]
function get(key)
end

A = get('cost')
"#,
    );

    // `get('cost')` indexes `Foo` with the literal key, yielding `number`.
    let resolved = workspace.expr_ty("A");
    assert_eq!(workspace.humanize_type(resolved), "number");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,16 @@ mod test {
"#,
));
}

#[test]
fn test_issue_925() {
    // Regression test for issue 925: type parameters declared in an alias
    // header (here `T` and `K extends keyof T`) must be in scope while the
    // alias body is analyzed, so no TypeNotFound diagnostic is reported.
    let mut workspace = VirtualWorkspace::new();

    assert!(workspace.check_code_for(
        DiagnosticCode::TypeNotFound,
        r#"
---@alias Pick<T, K extends keyof T> { [P in K]: T[P]; }
"#,
    ));
}
}
2 changes: 1 addition & 1 deletion crates/emmylua_code_analysis/src/db_index/module/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -397,7 +397,7 @@ impl LuaModuleIndex {

pub fn next_library_workspace_id(&self) -> u32 {
let used: HashSet<u32> = self.workspaces.iter().map(|w| w.id.id).collect();
let mut candidate = 2;
let mut candidate = WorkspaceId::LIBRARY_START.id;
while used.contains(&candidate) {
candidate += 1;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,10 @@ impl WorkspaceId {
pub const STD: WorkspaceId = WorkspaceId { id: 0 };
pub const MAIN: WorkspaceId = WorkspaceId { id: 1 };
pub const REMOTE: WorkspaceId = WorkspaceId { id: 2 };
pub const LIBRARY_START: WorkspaceId = WorkspaceId { id: 3 };

pub fn is_library(&self) -> bool {
self.id > 2
self.id >= Self::LIBRARY_START.id
}

pub fn is_remote(&self) -> bool {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,12 @@ pub fn instantiate_alias_call(
return LuaType::Unknown;
}

if operands[0].contain_tpl() || operands[1].contain_tpl() {
return LuaType::Call(
LuaAliasCallType::new(LuaAliasCallKind::Extends, operands).into(),
);
}

let compact = type_check::check_type_compact(db, &operands[0], &operands[1]).is_ok();
LuaType::BooleanConst(compact)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -493,21 +493,24 @@ fn instantiate_conditional(
}
}

// infer 必须位于条件语句中(right), 判断是否包含并收集
if contains_conditional_infer(&right)
&& collect_infer_assignments(db, &left, &right, &mut infer_assignments)
{
condition_result = Some(true);
} else {
condition_result = Some(
check_type_compact_with_level(
db,
&left,
&right,
TypeCheckCheckLevel::GenericConditional,
)
.is_ok(),
);
// 仍有未解析模板时不能提前折叠 conditional, 否则会把 infer 结果固定成悬空占位符.
if !left.contain_tpl() && !right.contain_tpl() {
// infer 必须位于条件语句中(right), 判断是否包含并收集
if contains_conditional_infer(&right)
&& collect_infer_assignments(db, &left, &right, &mut infer_assignments)
{
condition_result = Some(true);
} else {
condition_result = Some(
check_type_compact_with_level(
db,
&left,
&right,
TypeCheckCheckLevel::GenericConditional,
)
.is_ok(),
);
}
}
}

Expand Down
71 changes: 70 additions & 1 deletion crates/emmylua_parser/src/grammar/doc/test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3215,7 +3215,7 @@ Syntax(Chunk)@0..110
let code = r#"
---@alias ConstructorParameters<T> T extends new (fun(...: infer P): any) and P or never
"#;
print_ast(code);
// print_ast(code);
let result = r#"
Syntax(Chunk)@0..106
Syntax(Block)@0..106
Expand Down Expand Up @@ -3424,4 +3424,73 @@ Syntax(Chunk)@0..60

assert_ast_eq!(code, result);
}

#[test]
// `T[<index>]` must parse as a TypeIndexAccess node for integer, string and
// name indices, while an empty `T[]` still parses as a TypeArray.
fn test_type_index_access() {
let code = r#"
---@type T[1]
---@type T["a"]
---@type T[A]
---@type T[]
"#;
// print_ast(code);
// Expected syntax tree: three TypeIndexAccess nodes followed by one
// TypeArray for the index-less form.
let result = r#"
Syntax(Chunk)@0..98
Syntax(Block)@0..98
Token(TkEndOfLine)@0..1 "\n"
Token(TkWhitespace)@1..9 "        "
Syntax(Comment)@9..89
Token(TkDocStart)@9..13 "---@"
Syntax(DocTagType)@13..22
Token(TkTagType)@13..17 "type"
Token(TkWhitespace)@17..18 " "
Syntax(TypeIndexAccess)@18..22
Syntax(TypeName)@18..19
Token(TkName)@18..19 "T"
Token(TkLeftBracket)@19..20 "["
Syntax(TypeLiteral)@20..21
Token(TkInt)@20..21 "1"
Token(TkRightBracket)@21..22 "]"
Token(TkEndOfLine)@22..23 "\n"
Token(TkWhitespace)@23..31 "        "
Token(TkDocStart)@31..35 "---@"
Syntax(DocTagType)@35..46
Token(TkTagType)@35..39 "type"
Token(TkWhitespace)@39..40 " "
Syntax(TypeIndexAccess)@40..46
Syntax(TypeName)@40..41
Token(TkName)@40..41 "T"
Token(TkLeftBracket)@41..42 "["
Syntax(TypeLiteral)@42..45
Token(TkString)@42..45 "\"a\""
Token(TkRightBracket)@45..46 "]"
Token(TkEndOfLine)@46..47 "\n"
Token(TkWhitespace)@47..55 "        "
Token(TkDocStart)@55..59 "---@"
Syntax(DocTagType)@59..68
Token(TkTagType)@59..63 "type"
Token(TkWhitespace)@63..64 " "
Syntax(TypeIndexAccess)@64..68
Syntax(TypeName)@64..65
Token(TkName)@64..65 "T"
Token(TkLeftBracket)@65..66 "["
Syntax(TypeName)@66..67
Token(TkName)@66..67 "A"
Token(TkRightBracket)@67..68 "]"
Token(TkEndOfLine)@68..69 "\n"
Token(TkWhitespace)@69..77 "        "
Token(TkDocStart)@77..81 "---@"
Syntax(DocTagType)@81..89
Token(TkTagType)@81..85 "type"
Token(TkWhitespace)@85..86 " "
Syntax(TypeArray)@86..89
Syntax(TypeName)@86..87
Token(TkName)@86..87 "T"
Token(TkLeftBracket)@87..88 "["
Token(TkRightBracket)@88..89 "]"
Token(TkEndOfLine)@89..90 "\n"
Token(TkWhitespace)@90..98 "        "
"#;
assert_ast_eq!(code, result);
}
}
11 changes: 3 additions & 8 deletions crates/emmylua_parser/src/grammar/doc/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -527,17 +527,12 @@ fn parse_suffixed_type(p: &mut LuaDocParser, cm: CompleteMarker) -> DocParseResu
LuaTokenKind::TkLeftBracket => {
let mut m = cm.precede(p, LuaSyntaxKind::TypeArray);
p.bump();
if p.state == LuaDocParserState::Mapped {
if p.current_token() != LuaTokenKind::TkRightBracket {
m.set_kind(p, LuaSyntaxKind::TypeIndexAccess);
parse_type(p)?;
}
} else if matches!(
if matches!(
p.current_token(),
LuaTokenKind::TkString | LuaTokenKind::TkInt | LuaTokenKind::TkName
) {
m.set_kind(p, LuaSyntaxKind::IndexExpr);
p.bump();
m.set_kind(p, LuaSyntaxKind::TypeIndexAccess);
parse_type(p)?;
}

expect_token(p, LuaTokenKind::TkRightBracket)?;
Expand Down
Loading