Reorganize code into individual tool files
This commit is contained in:
555
ast.go
555
ast.go
@@ -1,555 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Position represents a location in source code
|
||||
type Position struct {
|
||||
File string `json:"file"`
|
||||
Line int `json:"line"`
|
||||
Column int `json:"column"`
|
||||
Offset int `json:"offset"` // byte offset in file
|
||||
}
|
||||
|
||||
// newPosition creates a Position from a token.Position
|
||||
func newPosition(pos token.Position) Position {
|
||||
return Position{
|
||||
File: pos.Filename,
|
||||
Line: pos.Line,
|
||||
Column: pos.Column,
|
||||
Offset: pos.Offset,
|
||||
}
|
||||
}
|
||||
|
||||
// Symbol is one named top-level declaration found by findSymbols.
type Symbol struct {
	Name     string   `json:"name"`
	Type     string   `json:"type"` // "function", "struct", "interface", "type", "variable", or "constant"
	Package  string   `json:"package"`
	Exported bool     `json:"exported"`
	Position Position `json:"position"`
}

// TypeInfo describes one type declaration in detail (see getTypeInfo).
type TypeInfo struct {
	Name       string       `json:"name"`
	Package    string       `json:"package"`
	Kind       string       `json:"kind"` // "struct", "interface", "alias", or "other"
	Position   Position     `json:"position"`
	Fields     []FieldInfo  `json:"fields,omitempty"`     // struct fields
	Methods    []MethodInfo `json:"methods,omitempty"`    // methods found in the defining file
	Embedded   []string     `json:"embedded,omitempty"`   // embedded struct fields
	Interface  []MethodInfo `json:"interface,omitempty"`  // interface method set
	Underlying string       `json:"underlying,omitempty"` // underlying type text for simple definitions
}

// FieldInfo is one struct field. Embedded fields have an empty Name.
type FieldInfo struct {
	Name     string   `json:"name"`
	Type     string   `json:"type"`
	Tag      string   `json:"tag,omitempty"`
	Exported bool     `json:"exported"`
	Position Position `json:"position"`
}

// MethodInfo is one method (or interface method) with a rendered signature.
type MethodInfo struct {
	Name      string   `json:"name"`
	Signature string   `json:"signature"`
	Receiver  string   `json:"receiver,omitempty"` // empty for interface methods
	Exported  bool     `json:"exported"`
	Position  Position `json:"position"`
}

// Reference is one occurrence of a symbol located by findReferences.
type Reference struct {
	Context  string   `json:"context"` // surrounding source lines
	Kind     string   `json:"kind"`    // e.g. "identifier" or "selector"
	Position Position `json:"position"`
}

// Package summarizes one directory of Go files (see listPackages).
type Package struct {
	ImportPath string   `json:"import_path"`
	Name       string   `json:"name"`
	Dir        string   `json:"dir"`
	GoFiles    []string `json:"go_files"`
	Imports    []string `json:"imports"`
}

// fileVisitor is the callback invoked by walkGoFiles for each parsed file.
type fileVisitor func(path string, src []byte, file *ast.File, fset *token.FileSet) error
|
||||
|
||||
// walkGoFiles walks dir recursively and invokes visitor for every .go file
// that parses successfully. All files share a single token.FileSet, so
// positions from different files remain comparable.
//
// NOTE(review): read and parse failures are silently skipped — this is a
// deliberate best-effort scan. Only filesystem walk errors and visitor
// errors abort the walk.
// NOTE(review): the vendor check uses a forward slash; presumably fine on
// Unix paths — confirm behavior on Windows separators.
func walkGoFiles(dir string, visitor fileVisitor) error {
	fset := token.NewFileSet()

	return filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		// Only visit .go sources outside vendored dependencies.
		if d.IsDir() || !strings.HasSuffix(path, ".go") || strings.Contains(path, "vendor/") {
			return nil
		}

		src, err := os.ReadFile(path)
		if err != nil {
			return nil // unreadable file: skip, keep walking
		}

		file, err := parser.ParseFile(fset, path, src, parser.ParseComments)
		if err != nil {
			return nil // unparsable file: skip, keep walking
		}

		return visitor(path, src, file, fset)
	})
}
|
||||
|
||||
// findSymbols scans all Go files under dir and returns every top-level
// function, type, variable, and constant whose name matches pattern
// (case-insensitive substring via matchesPattern; empty pattern matches
// everything).
//
// NOTE(review): _test.go files are skipped unless the pattern itself
// contains "Test" — a heuristic, so symbols defined only in test files are
// invisible to other searches.
func findSymbols(dir string, pattern string) ([]Symbol, error) {
	var symbols []Symbol

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		if strings.HasSuffix(path, "_test.go") && !strings.Contains(pattern, "Test") {
			return nil
		}

		pkgName := file.Name.Name

		ast.Inspect(file, func(n ast.Node) bool {
			switch decl := n.(type) {
			case *ast.FuncDecl:
				// Functions and methods.
				name := decl.Name.Name
				if matchesPattern(name, pattern) {
					pos := fset.Position(decl.Pos())
					symbols = append(symbols, Symbol{
						Name:     name,
						Type:     "function",
						Package:  pkgName,
						Exported: ast.IsExported(name),
						Position: newPosition(pos),
					})
				}

			case *ast.GenDecl:
				for _, spec := range decl.Specs {
					switch s := spec.(type) {
					case *ast.TypeSpec:
						// Type declarations, classified as struct/interface/type.
						name := s.Name.Name
						if matchesPattern(name, pattern) {
							pos := fset.Position(s.Pos())
							kind := "type"
							switch s.Type.(type) {
							case *ast.InterfaceType:
								kind = "interface"
							case *ast.StructType:
								kind = "struct"
							}
							symbols = append(symbols, Symbol{
								Name:     name,
								Type:     kind,
								Package:  pkgName,
								Exported: ast.IsExported(name),
								Position: newPosition(pos),
							})
						}

					case *ast.ValueSpec:
						// var/const declarations; one spec may name several
						// identifiers (e.g. `var a, b int`).
						for _, name := range s.Names {
							if matchesPattern(name.Name, pattern) {
								pos := fset.Position(name.Pos())
								kind := "variable"
								if decl.Tok == token.CONST {
									kind = "constant"
								}
								symbols = append(symbols, Symbol{
									Name:     name.Name,
									Type:     kind,
									Package:  pkgName,
									Exported: ast.IsExported(name.Name),
									Position: newPosition(pos),
								})
							}
						}
					}
				}
			}
			return true
		})

		return nil
	})

	return symbols, err
}
|
||||
|
||||
// getTypeInfo locates the first declaration of typeName under dir and
// returns its structure: fields and embedded types for structs, the method
// set for interfaces, and the underlying type text for simple definitions.
// Methods declared in the same file as the type are attached too.
//
// NOTE(review): Kind "alias" is reported for any definition whose RHS is a
// bare identifier or selector (e.g. `type MyInt int`), not only true
// aliases (`type A = B`). Consumers may depend on this label, so it is
// documented rather than changed.
// NOTE(review): only methods in the defining file are collected; methods
// on the same type in sibling files are missed.
func getTypeInfo(dir string, typeName string) (*TypeInfo, error) {
	var result *TypeInfo

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		if result != nil {
			return nil // already found in an earlier file
		}

		ast.Inspect(file, func(n ast.Node) bool {
			if result != nil {
				return false // stop descending once found
			}

			switch decl := n.(type) {
			case *ast.GenDecl:
				for _, spec := range decl.Specs {
					if ts, ok := spec.(*ast.TypeSpec); ok && ts.Name.Name == typeName {
						pos := fset.Position(ts.Pos())
						info := &TypeInfo{
							Name:     typeName,
							Package:  file.Name.Name,
							Position: newPosition(pos),
						}

						// Classify by the declared underlying type.
						switch t := ts.Type.(type) {
						case *ast.StructType:
							info.Kind = "struct"
							info.Fields = extractFields(t, fset)
							info.Embedded = extractEmbedded(t)

						case *ast.InterfaceType:
							info.Kind = "interface"
							info.Interface = extractInterfaceMethods(t, fset)

						case *ast.Ident:
							info.Kind = "alias"
							info.Underlying = t.Name

						case *ast.SelectorExpr:
							info.Kind = "alias"
							if x, ok := t.X.(*ast.Ident); ok {
								info.Underlying = x.Name + "." + t.Sel.Name
							}

						default:
							info.Kind = "other"
						}

						info.Methods = extractMethods(file, typeName, fset)
						result = info
						return false
					}
				}
			}
			return true
		})

		return nil
	})

	if result == nil && err == nil {
		return nil, fmt.Errorf("type %s not found", typeName)
	}

	return result, err
}
|
||||
|
||||
func findReferences(dir string, symbol string) ([]Reference, error) {
|
||||
var refs []Reference
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch node := n.(type) {
|
||||
case *ast.Ident:
|
||||
if node.Name == symbol {
|
||||
pos := fset.Position(node.Pos())
|
||||
kind := identifyReferenceKind(node)
|
||||
context := extractContext(src, pos)
|
||||
|
||||
refs = append(refs, Reference{
|
||||
Context: context,
|
||||
Kind: kind,
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
|
||||
case *ast.SelectorExpr:
|
||||
if node.Sel.Name == symbol {
|
||||
pos := fset.Position(node.Sel.Pos())
|
||||
context := extractContext(src, pos)
|
||||
|
||||
refs = append(refs, Reference{
|
||||
Context: context,
|
||||
Kind: "selector",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return refs, err
|
||||
}
|
||||
|
||||
// listPackages groups the Go files under dir by directory and reports each
// directory as one package with its file names and the union of their
// imports. Test files are included only when includeTests is true. The
// ImportPath is the directory path relative to dir ("." for dir itself).
//
// NOTE(review): the package Name comes from whichever file is visited
// first, so a directory containing an external test package (foo_test) may
// be labeled with that name when includeTests is set.
// NOTE(review): map iteration order is random, so the returned slice order
// is nondeterministic across runs.
func listPackages(dir string, includeTests bool) ([]Package, error) {
	packages := make(map[string]*Package)

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		// Skip test files if not requested
		if !includeTests && strings.HasSuffix(path, "_test.go") {
			return nil
		}

		pkgDir := filepath.Dir(path)

		// Initialize package if not seen before
		if _, exists := packages[pkgDir]; !exists {
			importPath := strings.TrimPrefix(pkgDir, dir)
			importPath = strings.TrimPrefix(importPath, "/")
			if importPath == "" {
				importPath = "."
			}

			packages[pkgDir] = &Package{
				ImportPath: importPath,
				Name:       file.Name.Name,
				Dir:        pkgDir,
				GoFiles:    []string{},
				Imports:    []string{},
			}
		}

		// Add file to package
		fileName := filepath.Base(path)
		packages[pkgDir].GoFiles = append(packages[pkgDir].GoFiles, fileName)

		// Collect unique imports
		imports := make(map[string]bool)
		for _, imp := range file.Imports {
			importPath := strings.Trim(imp.Path.Value, `"`)
			imports[importPath] = true
		}

		// Merge imports into package
		existingImports := make(map[string]bool)
		for _, imp := range packages[pkgDir].Imports {
			existingImports[imp] = true
		}
		for imp := range imports {
			if !existingImports[imp] {
				packages[pkgDir].Imports = append(packages[pkgDir].Imports, imp)
			}
		}

		return nil
	})

	if err != nil {
		return nil, err
	}

	var result []Package
	for _, pkg := range packages {
		result = append(result, *pkg)
	}

	return result, nil
}
|
||||
|
||||
// matchesPattern reports whether name matches pattern using a
// case-insensitive substring match. An empty pattern matches everything.
func matchesPattern(name, pattern string) bool {
	if pattern == "" {
		return true
	}
	return strings.Contains(strings.ToLower(name), strings.ToLower(pattern))
}
|
||||
|
||||
// extractFields flattens a struct's field list into FieldInfo records.
// Embedded (anonymous) fields are reported with an empty Name and are
// treated as exported; a named field spec produces one record per
// identifier, all sharing the same rendered type and tag.
func extractFields(st *ast.StructType, fset *token.FileSet) []FieldInfo {
	var fields []FieldInfo

	for _, field := range st.Fields.List {
		fieldType := exprToString(field.Type)
		tag := ""
		if field.Tag != nil {
			// Raw tag literal including surrounding backquotes.
			tag = field.Tag.Value
		}

		if len(field.Names) == 0 {
			// Embedded field: no name of its own.
			pos := fset.Position(field.Pos())
			fields = append(fields, FieldInfo{
				Name:     "",
				Type:     fieldType,
				Tag:      tag,
				Exported: true,
				Position: newPosition(pos),
			})
		} else {
			for _, name := range field.Names {
				pos := fset.Position(name.Pos())
				fields = append(fields, FieldInfo{
					Name:     name.Name,
					Type:     fieldType,
					Tag:      tag,
					Exported: ast.IsExported(name.Name),
					Position: newPosition(pos),
				})
			}
		}
	}

	return fields
}
|
||||
|
||||
func extractEmbedded(st *ast.StructType) []string {
|
||||
var embedded []string
|
||||
|
||||
for _, field := range st.Fields.List {
|
||||
if len(field.Names) == 0 {
|
||||
embedded = append(embedded, exprToString(field.Type))
|
||||
}
|
||||
}
|
||||
|
||||
return embedded
|
||||
}
|
||||
|
||||
func extractInterfaceMethods(it *ast.InterfaceType, fset *token.FileSet) []MethodInfo {
|
||||
var methods []MethodInfo
|
||||
|
||||
for _, method := range it.Methods.List {
|
||||
if len(method.Names) > 0 {
|
||||
for _, name := range method.Names {
|
||||
sig := exprToString(method.Type)
|
||||
pos := fset.Position(name.Pos())
|
||||
methods = append(methods, MethodInfo{
|
||||
Name: name.Name,
|
||||
Signature: sig,
|
||||
Exported: ast.IsExported(name.Name),
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return methods
|
||||
}
|
||||
|
||||
func extractMethods(file *ast.File, typeName string, fset *token.FileSet) []MethodInfo {
|
||||
var methods []MethodInfo
|
||||
|
||||
for _, decl := range file.Decls {
|
||||
if fn, ok := decl.(*ast.FuncDecl); ok && fn.Recv != nil {
|
||||
for _, recv := range fn.Recv.List {
|
||||
recvType := exprToString(recv.Type)
|
||||
if strings.Contains(recvType, typeName) {
|
||||
sig := funcSignature(fn.Type)
|
||||
pos := fset.Position(fn.Name.Pos())
|
||||
methods = append(methods, MethodInfo{
|
||||
Name: fn.Name.Name,
|
||||
Signature: sig,
|
||||
Receiver: recvType,
|
||||
Exported: ast.IsExported(fn.Name.Name),
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return methods
|
||||
}
|
||||
|
||||
func exprToString(expr ast.Expr) string {
|
||||
switch e := expr.(type) {
|
||||
case *ast.Ident:
|
||||
return e.Name
|
||||
case *ast.StarExpr:
|
||||
return "*" + exprToString(e.X)
|
||||
case *ast.SelectorExpr:
|
||||
return exprToString(e.X) + "." + e.Sel.Name
|
||||
case *ast.ArrayType:
|
||||
if e.Len == nil {
|
||||
return "[]" + exprToString(e.Elt)
|
||||
}
|
||||
return "[" + exprToString(e.Len) + "]" + exprToString(e.Elt)
|
||||
case *ast.MapType:
|
||||
return "map[" + exprToString(e.Key) + "]" + exprToString(e.Value)
|
||||
case *ast.InterfaceType:
|
||||
if len(e.Methods.List) == 0 {
|
||||
return "interface{}"
|
||||
}
|
||||
return "interface{...}"
|
||||
case *ast.FuncType:
|
||||
return funcSignature(e)
|
||||
case *ast.ChanType:
|
||||
switch e.Dir {
|
||||
case ast.SEND:
|
||||
return "chan<- " + exprToString(e.Value)
|
||||
case ast.RECV:
|
||||
return "<-chan " + exprToString(e.Value)
|
||||
default:
|
||||
return "chan " + exprToString(e.Value)
|
||||
}
|
||||
case *ast.BasicLit:
|
||||
return e.Value
|
||||
default:
|
||||
return fmt.Sprintf("%T", expr)
|
||||
}
|
||||
}
|
||||
|
||||
func funcSignature(fn *ast.FuncType) string {
|
||||
params := fieldListToString(fn.Params)
|
||||
results := fieldListToString(fn.Results)
|
||||
|
||||
if results == "" {
|
||||
return fmt.Sprintf("func(%s)", params)
|
||||
}
|
||||
return fmt.Sprintf("func(%s) %s", params, results)
|
||||
}
|
||||
|
||||
func fieldListToString(fl *ast.FieldList) string {
|
||||
if fl == nil || len(fl.List) == 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
var parts []string
|
||||
for _, field := range fl.List {
|
||||
fieldType := exprToString(field.Type)
|
||||
if len(field.Names) == 0 {
|
||||
parts = append(parts, fieldType)
|
||||
} else {
|
||||
for _, name := range field.Names {
|
||||
parts = append(parts, name.Name+" "+fieldType)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(parts) == 1 && !strings.Contains(parts[0], " ") {
|
||||
return parts[0]
|
||||
}
|
||||
return "(" + strings.Join(parts, ", ") + ")"
|
||||
}
|
||||
|
||||
// identifyReferenceKind classifies how an identifier is used.
//
// NOTE(review): currently a stub — every identifier is reported as
// "identifier" regardless of context. Proper classification (call,
// declaration, assignment, ...) would need the parent node, which is not
// available from the ident alone.
func identifyReferenceKind(ident *ast.Ident) string {
	return "identifier"
}
|
||||
|
||||
func extractContext(src []byte, pos token.Position) string {
|
||||
lines := strings.Split(string(src), "\n")
|
||||
if pos.Line <= 0 || pos.Line > len(lines) {
|
||||
return ""
|
||||
}
|
||||
|
||||
start := pos.Line - 2
|
||||
if start < 0 {
|
||||
start = 0
|
||||
}
|
||||
end := pos.Line + 1
|
||||
if end > len(lines) {
|
||||
end = len(lines)
|
||||
}
|
||||
|
||||
context := strings.Join(lines[start:end], "\n")
|
||||
return strings.TrimSpace(context)
|
||||
}
|
||||
1121
ast_advanced.go
1121
ast_advanced.go
File diff suppressed because it is too large
Load Diff
904
ast_extended.go
904
ast_extended.go
@@ -1,904 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Import analysis types

// ImportInfo summarizes the import declarations of one file.
type ImportInfo struct {
	Package       string         `json:"package"`
	File          string         `json:"file"`
	Imports       []ImportDetail `json:"imports"`
	UnusedImports []string       `json:"unused_imports,omitempty"` // imports with no observed use (heuristic)
}

// ImportDetail is one import declaration and the symbols seen used from it.
type ImportDetail struct {
	Path     string   `json:"path"`
	Alias    string   `json:"alias,omitempty"`
	Used     []string `json:"used_symbols,omitempty"`
	Position Position `json:"position"`
}

// Function call types

// FunctionCall is one call site found by findFunctionCalls.
type FunctionCall struct {
	Caller   string   `json:"caller"` // enclosing function name, if tracked
	Context  string   `json:"context"`
	Position Position `json:"position"`
}

// Struct usage types

// StructUsage aggregates, per file, how a given struct type is used.
type StructUsage struct {
	File        string          `json:"file"`
	Literals    []StructLiteral `json:"literals,omitempty"`
	FieldAccess []FieldAccess   `json:"field_access,omitempty"`
	TypeUsage   []TypeUsage     `json:"type_usage,omitempty"`
}

// StructLiteral is one composite literal of the struct type.
type StructLiteral struct {
	Fields      []string `json:"fields_initialized"` // keys set in keyed literals
	IsComposite bool     `json:"is_composite"`       // true when the literal has any elements
	Position    Position `json:"position"`
}

// FieldAccess is one x.Field selector observed on the struct.
type FieldAccess struct {
	Field    string   `json:"field"`
	Context  string   `json:"context"`
	Position Position `json:"position"`
}

// TypeUsage records where the struct type appears in a declaration.
type TypeUsage struct {
	Usage    string   `json:"usage"` // e.g. "field"
	Position Position `json:"position"`
}

// Interface analysis types

// InterfaceInfo describes one interface and, optionally, the concrete
// types found to satisfy it.
type InterfaceInfo struct {
	Name            string               `json:"name"`
	Package         string               `json:"package"`
	Position        Position             `json:"position"`
	Methods         []MethodInfo         `json:"methods"`
	Implementations []ImplementationType `json:"implementations,omitempty"`
}

// ImplementationType names one concrete type satisfying an interface.
type ImplementationType struct {
	Type     string   `json:"type"`
	Package  string   `json:"package"`
	Position Position `json:"position"`
}

// Error handling types

// ErrorInfo aggregates error-handling observations for one file.
type ErrorInfo struct {
	File            string         `json:"file"`
	UnhandledErrors []ErrorContext `json:"unhandled_errors,omitempty"`
	ErrorChecks     []ErrorContext `json:"error_checks,omitempty"`
	ErrorReturns    []ErrorContext `json:"error_returns,omitempty"`
}

// ErrorContext is one error-related site with its source context.
type ErrorContext struct {
	Context  string   `json:"context"`
	Type     string   `json:"type"` // "unchecked_call", "error_check", or "error_return"
	Position Position `json:"position"`
}

// Test analysis types

// TestAnalysis is the result of analyzeTests: test inventory plus a naive
// coverage estimate over exported functions.
type TestAnalysis struct {
	TestFiles         []TestFile     `json:"test_files"`
	ExportedFunctions []ExportedFunc `json:"exported_functions"`
	TestCoverage      TestCoverage   `json:"coverage_summary"`
}

// TestFile lists the Test/Benchmark/Example functions in one _test.go file.
type TestFile struct {
	File       string   `json:"file"`
	Package    string   `json:"package"`
	Tests      []string `json:"tests"`
	Benchmarks []string `json:"benchmarks,omitempty"`
	Examples   []string `json:"examples,omitempty"`
}

// ExportedFunc is one exported function and whether a matching test exists.
type ExportedFunc struct {
	Name     string   `json:"name"`
	Package  string   `json:"package"`
	Tested   bool     `json:"tested"`
	Position Position `json:"position"`
}

// TestCoverage is the aggregate tested/exported ratio.
type TestCoverage struct {
	TotalExported int     `json:"total_exported"`
	TotalTested   int     `json:"total_tested"`
	Percentage    float64 `json:"percentage"`
}

// Comment analysis types

// CommentInfo aggregates comment findings for one file.
type CommentInfo struct {
	File         string        `json:"file"`
	TODOs        []CommentItem `json:"todos,omitempty"`
	Undocumented []CommentItem `json:"undocumented,omitempty"`
}

// CommentItem is one TODO-style comment or undocumented declaration.
type CommentItem struct {
	Name     string   `json:"name"`
	Comment  string   `json:"comment,omitempty"`
	Type     string   `json:"type"`
	Position Position `json:"position"`
}

// Dependency analysis types

// DependencyInfo describes one package's dependency relationships.
type DependencyInfo struct {
	Package      string     `json:"package"`
	Dir          string     `json:"dir"`
	Dependencies []string   `json:"dependencies"`
	Dependents   []string   `json:"dependents,omitempty"`
	Cycles       [][]string `json:"cycles,omitempty"`
}

// Generic types

// GenericInfo describes one generic declaration and its type parameters.
type GenericInfo struct {
	Name       string      `json:"name"`
	Kind       string      `json:"kind"`
	Package    string      `json:"package"`
	Position   Position    `json:"position"`
	TypeParams []TypeParam `json:"type_params"`
	Instances  []Instance  `json:"instances,omitempty"`
}

// TypeParam is one type parameter with its constraint.
type TypeParam struct {
	Name       string   `json:"name"`
	Constraint string   `json:"constraint"`
	Position   Position `json:"position"`
}

// Instance is one observed instantiation of a generic declaration.
type Instance struct {
	Types    []string `json:"types"`
	Position Position `json:"position"`
}
|
||||
|
||||
// findImports reports, for every file under dir, its import declarations,
// the symbols each import is observed to provide (via pkg.Symbol selector
// expressions), and imports for which no use was found.
//
// NOTE(review): usage detection matches the selector's qualifier against
// the import path's last element (or the declared alias). Imports whose
// package name differs from the path base, and dot imports, will be
// misreported as unused. Blank (_) imports are excluded from the unused
// list by design.
func findImports(dir string) ([]ImportInfo, error) {
	var imports []ImportInfo

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		info := ImportInfo{
			Package: file.Name.Name,
			File:    path,
			Imports: []ImportDetail{},
		}

		// Collect all imports
		importMap := make(map[string]*ImportDetail)
		for _, imp := range file.Imports {
			importPath := strings.Trim(imp.Path.Value, `"`)
			pos := fset.Position(imp.Pos())
			detail := &ImportDetail{
				Path:     importPath,
				Position: newPosition(pos),
			}
			if imp.Name != nil {
				detail.Alias = imp.Name.Name
			}
			importMap[importPath] = detail
			info.Imports = append(info.Imports, *detail)
		}

		// Track which imports are used (import path -> set of used symbols)
		usedImports := make(map[string]map[string]bool)
		ast.Inspect(file, func(n ast.Node) bool {
			switch x := n.(type) {
			case *ast.SelectorExpr:
				if ident, ok := x.X.(*ast.Ident); ok {
					pkgName := ident.Name
					symbol := x.Sel.Name

					// Find matching import
					for importPath, detail := range importMap {
						importName := filepath.Base(importPath)
						if detail.Alias != "" && detail.Alias == pkgName {
							if usedImports[importPath] == nil {
								usedImports[importPath] = make(map[string]bool)
							}
							usedImports[importPath][symbol] = true
						} else if importName == pkgName {
							if usedImports[importPath] == nil {
								usedImports[importPath] = make(map[string]bool)
							}
							usedImports[importPath][symbol] = true
						}
					}
				}
			}
			return true
		})

		// Update import details with used symbols
		for i, imp := range info.Imports {
			if used, ok := usedImports[imp.Path]; ok {
				for symbol := range used {
					info.Imports[i].Used = append(info.Imports[i].Used, symbol)
				}
			} else if !strings.HasSuffix(imp.Path, "_test") && imp.Alias != "_" {
				info.UnusedImports = append(info.UnusedImports, imp.Path)
			}
		}

		if len(info.Imports) > 0 {
			imports = append(imports, info)
		}
		return nil
	})

	return imports, err
}
|
||||
|
||||
// findFunctionCalls returns every call expression under dir whose callee
// name — a bare identifier or the method name of a selector — equals
// functionName, together with the name of the enclosing function.
//
// NOTE(review): currentFunc is set when a FuncDecl is entered but never
// reset, so a call appearing at package level after the first function
// declaration would be attributed to that function. Calls inside nested
// function literals are attributed to the enclosing named function.
func findFunctionCalls(dir string, functionName string) ([]FunctionCall, error) {
	var calls []FunctionCall

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		currentFunc := ""

		ast.Inspect(file, func(n ast.Node) bool {
			// Track current function context
			if fn, ok := n.(*ast.FuncDecl); ok {
				currentFunc = fn.Name.Name
				return true
			}

			// Find function calls
			switch x := n.(type) {
			case *ast.CallExpr:
				var calledName string
				switch fun := x.Fun.(type) {
				case *ast.Ident:
					calledName = fun.Name
				case *ast.SelectorExpr:
					calledName = fun.Sel.Name
				}

				if calledName == functionName {
					pos := fset.Position(x.Pos())
					context := extractContext(src, pos)

					calls = append(calls, FunctionCall{
						Caller:   currentFunc,
						Context:  context,
						Position: newPosition(pos),
					})
				}
			}
			return true
		})

		return nil
	})

	return calls, err
}
|
||||
|
||||
// findStructUsage reports, per file, how structName is used: composite
// literals (with which fields are initialized), field accesses, and uses
// as a field type. Files with no usages are omitted. Relies on
// getTypeName, defined elsewhere in this package, to render type
// expressions.
//
// NOTE(review): the field-access case renders the selector's receiver
// expression (a value, not its type), so it matches receivers whose
// rendered text contains structName — a heuristic; verify against
// getTypeName's behavior.
func findStructUsage(dir string, structName string) ([]StructUsage, error) {
	var usages []StructUsage

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		usage := StructUsage{
			File: path,
		}

		ast.Inspect(file, func(n ast.Node) bool {
			switch x := n.(type) {
			// Find struct literals
			case *ast.CompositeLit:
				if typeName := getTypeName(x.Type); typeName == structName {
					pos := fset.Position(x.Pos())
					lit := StructLiteral{
						IsComposite: len(x.Elts) > 0,
						Position:    newPosition(pos),
					}

					// Extract initialized fields
					for _, elt := range x.Elts {
						if kv, ok := elt.(*ast.KeyValueExpr); ok {
							if ident, ok := kv.Key.(*ast.Ident); ok {
								lit.Fields = append(lit.Fields, ident.Name)
							}
						}
					}

					usage.Literals = append(usage.Literals, lit)
				}

			// Find field access
			case *ast.SelectorExpr:
				if typeName := getTypeName(x.X); strings.Contains(typeName, structName) {
					pos := fset.Position(x.Sel.Pos())
					context := extractContext(src, pos)

					usage.FieldAccess = append(usage.FieldAccess, FieldAccess{
						Field:    x.Sel.Name,
						Context:  context,
						Position: newPosition(pos),
					})
				}

			// Find type usage in declarations
			case *ast.Field:
				if typeName := getTypeName(x.Type); typeName == structName {
					pos := fset.Position(x.Pos())
					usage.TypeUsage = append(usage.TypeUsage, TypeUsage{
						Usage:    "field",
						Position: newPosition(pos),
					})
				}
			}
			return true
		})

		if len(usage.Literals) > 0 || len(usage.FieldAccess) > 0 || len(usage.TypeUsage) > 0 {
			usages = append(usages, usage)
		}
		return nil
	})

	return usages, err
}
|
||||
|
||||
// extractInterfaces collects interface declarations under dir. When a
// specific interfaceName is given, a second pass finds concrete types
// whose method-name set satisfies it (via implementsInterface, defined
// elsewhere) and records them as Implementations. With an empty
// interfaceName all interfaces are returned but implementations are not
// computed.
//
// NOTE(review): the second pass groups receiver methods per file, so a
// type whose methods are split across files will not be detected as an
// implementation.
func extractInterfaces(dir string, interfaceName string) ([]InterfaceInfo, error) {
	var interfaces []InterfaceInfo
	interfaceMap := make(map[string]*InterfaceInfo)

	// First pass: collect all interfaces
	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		ast.Inspect(file, func(n ast.Node) bool {
			if genDecl, ok := n.(*ast.GenDecl); ok {
				for _, spec := range genDecl.Specs {
					if typeSpec, ok := spec.(*ast.TypeSpec); ok {
						if iface, ok := typeSpec.Type.(*ast.InterfaceType); ok {
							name := typeSpec.Name.Name
							if interfaceName == "" || name == interfaceName {
								pos := fset.Position(typeSpec.Pos())
								info := &InterfaceInfo{
									Name:     name,
									Package:  file.Name.Name,
									Position: newPosition(pos),
									Methods:  extractInterfaceMethods(iface, fset),
								}
								interfaceMap[name] = info
							}
						}
					}
				}
			}
			return true
		})
		return nil
	})

	if err != nil {
		return nil, err
	}

	// Second pass: find implementations
	if interfaceName != "" {
		iface, exists := interfaceMap[interfaceName]
		if exists {
			err = walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
				// Collect all types with methods (receiver type -> method names)
				types := make(map[string][]string)

				for _, decl := range file.Decls {
					if fn, ok := decl.(*ast.FuncDecl); ok && fn.Recv != nil {
						for _, recv := range fn.Recv.List {
							typeName := getTypeName(recv.Type)
							types[typeName] = append(types[typeName], fn.Name.Name)
						}
					}
				}

				// Check if any type implements the interface
				for typeName, methods := range types {
					if implementsInterface(methods, iface.Methods) {
						// Find type declaration
						ast.Inspect(file, func(n ast.Node) bool {
							if genDecl, ok := n.(*ast.GenDecl); ok {
								for _, spec := range genDecl.Specs {
									if typeSpec, ok := spec.(*ast.TypeSpec); ok && typeSpec.Name.Name == typeName {
										pos := fset.Position(typeSpec.Pos())
										iface.Implementations = append(iface.Implementations, ImplementationType{
											Type:     typeName,
											Package:  file.Name.Name,
											Position: newPosition(pos),
										})
									}
								}
							}
							return true
						})
					}
				}
				return nil
			})
		}
	}

	// Convert map to slice
	for _, iface := range interfaceMap {
		interfaces = append(interfaces, *iface)
	}

	return interfaces, err
}
|
||||
|
||||
// findErrors surveys error handling per file: expression-statement calls
// whose results are discarded although the call appears to return an
// error (returnsError, defined elsewhere), if-statements that look like
// error checks (isErrorCheck, defined elsewhere), and return statements
// that propagate an err-like identifier. Files with no findings are
// omitted.
//
// NOTE(review): the error-return match is purely name-based ("err" or a
// name containing "error"), so it has false positives and negatives.
func findErrors(dir string) ([]ErrorInfo, error) {
	var errors []ErrorInfo

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		info := ErrorInfo{
			File: path,
		}

		ast.Inspect(file, func(n ast.Node) bool {
			switch x := n.(type) {
			// Find function calls that return errors but aren't checked
			case *ast.ExprStmt:
				if call, ok := x.X.(*ast.CallExpr); ok {
					// Check if this function likely returns an error
					if returnsError(call, file) {
						pos := fset.Position(call.Pos())
						context := extractContext(src, pos)
						info.UnhandledErrors = append(info.UnhandledErrors, ErrorContext{
							Context:  context,
							Type:     "unchecked_call",
							Position: newPosition(pos),
						})
					}
				}

			// Find error checks
			case *ast.IfStmt:
				if isErrorCheck(x) {
					pos := fset.Position(x.Pos())
					context := extractContext(src, pos)
					info.ErrorChecks = append(info.ErrorChecks, ErrorContext{
						Context:  context,
						Type:     "error_check",
						Position: newPosition(pos),
					})
				}

			// Find error returns
			case *ast.ReturnStmt:
				for _, result := range x.Results {
					if ident, ok := result.(*ast.Ident); ok && (ident.Name == "err" || strings.Contains(ident.Name, "error")) {
						pos := fset.Position(x.Pos())
						context := extractContext(src, pos)
						info.ErrorReturns = append(info.ErrorReturns, ErrorContext{
							Context:  context,
							Type:     "error_return",
							Position: newPosition(pos),
						})
						// Record each return statement at most once.
						break
					}
				}
			}
			return true
		})

		if len(info.UnhandledErrors) > 0 || len(info.ErrorChecks) > 0 || len(info.ErrorReturns) > 0 {
			errors = append(errors, info)
		}
		return nil
	})

	return errors, err
}
|
||||
|
||||
// analyzeTests inventories _test.go files (Test*, Benchmark*, Example*
// functions) and cross-references them against exported functions in
// non-test files, producing a naive coverage estimate.
//
// NOTE(review): a function counts as tested only when a test named exactly
// Test<FuncName> exists in the same package name — subtests, table tests,
// and differently named tests are not recognized, so coverage is a lower
// bound at best.
func analyzeTests(dir string) (*TestAnalysis, error) {
	analysis := &TestAnalysis{
		TestFiles:         []TestFile{},
		ExportedFunctions: []ExportedFunc{},
	}

	// Collect all exported functions, keyed by "package.FuncName".
	exportedFuncs := make(map[string]*ExportedFunc)

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		if strings.HasSuffix(path, "_test.go") {
			// Process test files
			testFile := TestFile{
				File:    path,
				Package: file.Name.Name,
			}

			for _, decl := range file.Decls {
				if fn, ok := decl.(*ast.FuncDecl); ok {
					name := fn.Name.Name
					if strings.HasPrefix(name, "Test") {
						testFile.Tests = append(testFile.Tests, name)
					} else if strings.HasPrefix(name, "Benchmark") {
						testFile.Benchmarks = append(testFile.Benchmarks, name)
					} else if strings.HasPrefix(name, "Example") {
						testFile.Examples = append(testFile.Examples, name)
					}
				}
			}

			if len(testFile.Tests) > 0 || len(testFile.Benchmarks) > 0 || len(testFile.Examples) > 0 {
				analysis.TestFiles = append(analysis.TestFiles, testFile)
			}
		} else {
			// Collect exported functions
			for _, decl := range file.Decls {
				if fn, ok := decl.(*ast.FuncDecl); ok && ast.IsExported(fn.Name.Name) {
					key := file.Name.Name + "." + fn.Name.Name
					pos := fset.Position(fn.Pos())
					exportedFuncs[key] = &ExportedFunc{
						Name:     fn.Name.Name,
						Package:  file.Name.Name,
						Tested:   false,
						Position: newPosition(pos),
					}
				}
			}
		}
		return nil
	})

	if err != nil {
		return nil, err
	}

	// Check which functions are tested
	for _, testFile := range analysis.TestFiles {
		for _, testName := range testFile.Tests {
			// Simple heuristic: TestFunctionName tests FunctionName
			funcName := strings.TrimPrefix(testName, "Test")
			key := testFile.Package + "." + funcName
			if fn, exists := exportedFuncs[key]; exists {
				fn.Tested = true
			}
		}
	}

	// Convert map to slice and calculate coverage
	tested := 0
	for _, fn := range exportedFuncs {
		analysis.ExportedFunctions = append(analysis.ExportedFunctions, *fn)
		if fn.Tested {
			tested++
		}
	}

	analysis.TestCoverage = TestCoverage{
		TotalExported: len(exportedFuncs),
		TotalTested:   tested,
	}
	if len(exportedFuncs) > 0 {
		analysis.TestCoverage.Percentage = float64(tested) / float64(len(exportedFuncs)) * 100
	}

	return analysis, nil
}
|
||||
|
||||
func findComments(dir string, commentType string) ([]CommentInfo, error) {
|
||||
var comments []CommentInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
info := CommentInfo{
|
||||
File: path,
|
||||
}
|
||||
|
||||
// Find TODOs in comments
|
||||
if commentType == "todo" || commentType == "all" {
|
||||
todoRegex := regexp.MustCompile(`(?i)\b(todo|fixme|hack|bug|xxx)\b`)
|
||||
for _, cg := range file.Comments {
|
||||
for _, c := range cg.List {
|
||||
if todoRegex.MatchString(c.Text) {
|
||||
pos := fset.Position(c.Pos())
|
||||
info.TODOs = append(info.TODOs, CommentItem{
|
||||
Comment: c.Text,
|
||||
Type: "todo",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find undocumented exported symbols
|
||||
if commentType == "undocumented" || commentType == "all" {
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch x := n.(type) {
|
||||
case *ast.FuncDecl:
|
||||
if ast.IsExported(x.Name.Name) && x.Doc == nil {
|
||||
pos := fset.Position(x.Pos())
|
||||
info.Undocumented = append(info.Undocumented, CommentItem{
|
||||
Name: x.Name.Name,
|
||||
Type: "function",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
case *ast.GenDecl:
|
||||
for _, spec := range x.Specs {
|
||||
switch s := spec.(type) {
|
||||
case *ast.TypeSpec:
|
||||
if ast.IsExported(s.Name.Name) && x.Doc == nil && s.Doc == nil {
|
||||
pos := fset.Position(s.Pos())
|
||||
info.Undocumented = append(info.Undocumented, CommentItem{
|
||||
Name: s.Name.Name,
|
||||
Type: "type",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
case *ast.ValueSpec:
|
||||
for _, name := range s.Names {
|
||||
if ast.IsExported(name.Name) && x.Doc == nil && s.Doc == nil {
|
||||
pos := fset.Position(name.Pos())
|
||||
info.Undocumented = append(info.Undocumented, CommentItem{
|
||||
Name: name.Name,
|
||||
Type: "value",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
if len(info.TODOs) > 0 || len(info.Undocumented) > 0 {
|
||||
comments = append(comments, info)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return comments, err
|
||||
}
|
||||
|
||||
func analyzeDependencies(dir string) ([]DependencyInfo, error) {
|
||||
depMap := make(map[string]*DependencyInfo)
|
||||
|
||||
// First pass: collect all packages and their imports
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
pkgDir := filepath.Dir(path)
|
||||
if _, exists := depMap[pkgDir]; !exists {
|
||||
depMap[pkgDir] = &DependencyInfo{
|
||||
Package: file.Name.Name,
|
||||
Dir: pkgDir,
|
||||
Dependencies: []string{},
|
||||
}
|
||||
}
|
||||
|
||||
// Add imports
|
||||
for _, imp := range file.Imports {
|
||||
importPath := strings.Trim(imp.Path.Value, `"`)
|
||||
if !contains(depMap[pkgDir].Dependencies, importPath) {
|
||||
depMap[pkgDir].Dependencies = append(depMap[pkgDir].Dependencies, importPath)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Build dependency graph and find cycles
|
||||
var deps []DependencyInfo
|
||||
for _, dep := range depMap {
|
||||
// Find internal dependencies
|
||||
for _, imp := range dep.Dependencies {
|
||||
// Check if this is an internal package
|
||||
for otherDir, otherDep := range depMap {
|
||||
if strings.HasSuffix(imp, otherDep.Package) && otherDir != dep.Dir {
|
||||
otherDep.Dependents = append(otherDep.Dependents, dep.Package)
|
||||
}
|
||||
}
|
||||
}
|
||||
deps = append(deps, *dep)
|
||||
}
|
||||
|
||||
// Simple cycle detection (could be enhanced)
|
||||
for i := range deps {
|
||||
deps[i].Cycles = findCycles(&deps[i], depMap)
|
||||
}
|
||||
|
||||
return deps, nil
|
||||
}
|
||||
|
||||
func findGenerics(dir string) ([]GenericInfo, error) {
|
||||
var generics []GenericInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch x := n.(type) {
|
||||
case *ast.GenDecl:
|
||||
for _, spec := range x.Specs {
|
||||
if ts, ok := spec.(*ast.TypeSpec); ok && ts.TypeParams != nil {
|
||||
pos := fset.Position(ts.Pos())
|
||||
info := GenericInfo{
|
||||
Name: ts.Name.Name,
|
||||
Kind: "type",
|
||||
Package: file.Name.Name,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
|
||||
// Extract type parameters
|
||||
for _, param := range ts.TypeParams.List {
|
||||
for _, name := range param.Names {
|
||||
namePos := fset.Position(name.Pos())
|
||||
tp := TypeParam{
|
||||
Name: name.Name,
|
||||
Position: newPosition(namePos),
|
||||
}
|
||||
if param.Type != nil {
|
||||
tp.Constraint = exprToString(param.Type)
|
||||
}
|
||||
info.TypeParams = append(info.TypeParams, tp)
|
||||
}
|
||||
}
|
||||
|
||||
generics = append(generics, info)
|
||||
}
|
||||
}
|
||||
|
||||
case *ast.FuncDecl:
|
||||
if x.Type.TypeParams != nil {
|
||||
pos := fset.Position(x.Pos())
|
||||
info := GenericInfo{
|
||||
Name: x.Name.Name,
|
||||
Kind: "function",
|
||||
Package: file.Name.Name,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
|
||||
// Extract type parameters
|
||||
for _, param := range x.Type.TypeParams.List {
|
||||
for _, name := range param.Names {
|
||||
namePos := fset.Position(name.Pos())
|
||||
tp := TypeParam{
|
||||
Name: name.Name,
|
||||
Position: newPosition(namePos),
|
||||
}
|
||||
if param.Type != nil {
|
||||
tp.Constraint = exprToString(param.Type)
|
||||
}
|
||||
info.TypeParams = append(info.TypeParams, tp)
|
||||
}
|
||||
}
|
||||
|
||||
generics = append(generics, info)
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
return nil
|
||||
})
|
||||
|
||||
return generics, err
|
||||
}
|
||||
|
||||
// Helper functions
|
||||
|
||||
func getTypeName(expr ast.Expr) string {
|
||||
switch x := expr.(type) {
|
||||
case *ast.Ident:
|
||||
return x.Name
|
||||
case *ast.StarExpr:
|
||||
return getTypeName(x.X)
|
||||
case *ast.SelectorExpr:
|
||||
return exprToString(x)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func implementsInterface(methods []string, interfaceMethods []MethodInfo) bool {
|
||||
for _, im := range interfaceMethods {
|
||||
found := false
|
||||
for _, m := range methods {
|
||||
if m == im.Name {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func returnsError(call *ast.CallExpr, file *ast.File) bool {
|
||||
// Simple heuristic: check if the function name suggests it returns an error
|
||||
switch fun := call.Fun.(type) {
|
||||
case *ast.Ident:
|
||||
name := fun.Name
|
||||
return strings.HasPrefix(name, "New") || strings.HasPrefix(name, "Create") ||
|
||||
strings.HasPrefix(name, "Open") || strings.HasPrefix(name, "Read") ||
|
||||
strings.HasPrefix(name, "Write") || strings.HasPrefix(name, "Parse")
|
||||
case *ast.SelectorExpr:
|
||||
name := fun.Sel.Name
|
||||
return strings.HasPrefix(name, "New") || strings.HasPrefix(name, "Create") ||
|
||||
strings.HasPrefix(name, "Open") || strings.HasPrefix(name, "Read") ||
|
||||
strings.HasPrefix(name, "Write") || strings.HasPrefix(name, "Parse")
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func isErrorCheck(ifStmt *ast.IfStmt) bool {
|
||||
// Check if this is an "if err != nil" pattern
|
||||
if binExpr, ok := ifStmt.Cond.(*ast.BinaryExpr); ok {
|
||||
if binExpr.Op == token.NEQ {
|
||||
if ident, ok := binExpr.X.(*ast.Ident); ok && (ident.Name == "err" || strings.Contains(ident.Name, "error")) {
|
||||
if ident2, ok := binExpr.Y.(*ast.Ident); ok && ident2.Name == "nil" {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// contains reports whether str occurs in slice.
func contains(slice []string, str string) bool {
	for i := range slice {
		if slice[i] == str {
			return true
		}
	}
	return false
}
|
||||
|
||||
// findCycles runs a DFS from dep's package over the graph in depMap and
// returns the import cycles reachable from it. Edges are resolved by
// suffix-matching import paths against package names, which is a
// heuristic and can create false edges between same-named packages.
// The DFS stops at the first cycle found along each branch.
func findCycles(dep *DependencyInfo, depMap map[string]*DependencyInfo) [][]string {
	// Simple DFS-based cycle detection
	var cycles [][]string
	visited := make(map[string]bool)  // packages ever entered
	recStack := make(map[string]bool) // packages currently on the DFS stack
	path := []string{}                // current root-to-node package trail

	var dfs func(pkg string) bool
	dfs = func(pkg string) bool {
		visited[pkg] = true
		recStack[pkg] = true
		path = append(path, pkg)

		// Find dependencies for this package
		for _, d := range depMap {
			if d.Package == pkg {
				for _, imp := range d.Dependencies {
					for _, otherDep := range depMap {
						if strings.HasSuffix(imp, otherDep.Package) {
							if !visited[otherDep.Package] {
								if dfs(otherDep.Package) {
									// Propagate: a cycle was already recorded deeper in.
									return true
								}
							} else if recStack[otherDep.Package] {
								// Found a cycle
								cycleStart := -1
								for i, p := range path {
									if p == otherDep.Package {
										cycleStart = i
										break
									}
								}
								if cycleStart >= 0 {
									// Copy the segment: path's backing array is
									// mutated as the recursion unwinds.
									cycle := append([]string{}, path[cycleStart:]...)
									cycles = append(cycles, cycle)
								}
								return true
							}
						}
					}
				}
				// NOTE(review): only the first depMap entry whose Package
				// matches pkg is expanded — directories that share a
				// package name are skipped by this break.
				break
			}
		}

		path = path[:len(path)-1]
		recStack[pkg] = false
		return false
	}

	dfs(dep.Package)
	return cycles
}
|
||||
173
common.go
Normal file
173
common.go
Normal file
@@ -0,0 +1,173 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Position represents a location in source code
type Position struct {
	File   string `json:"file"`
	Line   int    `json:"line"`   // line number, as reported by token.Position (1-based)
	Column int    `json:"column"` // column, as reported by token.Position
	Offset int    `json:"offset"` // byte offset in file
}

// newPosition creates a Position from a token.Position
func newPosition(pos token.Position) Position {
	return Position{
		File:   pos.Filename,
		Line:   pos.Line,
		Column: pos.Column,
		Offset: pos.Offset,
	}
}
|
||||
|
||||
type fileVisitor func(path string, src []byte, file *ast.File, fset *token.FileSet) error
|
||||
|
||||
func walkGoFiles(dir string, visitor fileVisitor) error {
|
||||
fset := token.NewFileSet()
|
||||
|
||||
return filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if d.IsDir() || !strings.HasSuffix(path, ".go") || strings.Contains(path, "vendor/") {
|
||||
return nil
|
||||
}
|
||||
|
||||
src, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
file, err := parser.ParseFile(fset, path, src, parser.ParseComments)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return visitor(path, src, file, fset)
|
||||
})
|
||||
}
|
||||
|
||||
func exprToString(expr ast.Expr) string {
|
||||
switch e := expr.(type) {
|
||||
case *ast.Ident:
|
||||
return e.Name
|
||||
case *ast.StarExpr:
|
||||
return "*" + exprToString(e.X)
|
||||
case *ast.SelectorExpr:
|
||||
return exprToString(e.X) + "." + e.Sel.Name
|
||||
case *ast.ArrayType:
|
||||
if e.Len == nil {
|
||||
return "[]" + exprToString(e.Elt)
|
||||
}
|
||||
return "[" + exprToString(e.Len) + "]" + exprToString(e.Elt)
|
||||
case *ast.MapType:
|
||||
return "map[" + exprToString(e.Key) + "]" + exprToString(e.Value)
|
||||
case *ast.InterfaceType:
|
||||
if len(e.Methods.List) == 0 {
|
||||
return "interface{}"
|
||||
}
|
||||
return "interface{...}"
|
||||
case *ast.FuncType:
|
||||
return funcSignature(e)
|
||||
case *ast.ChanType:
|
||||
switch e.Dir {
|
||||
case ast.SEND:
|
||||
return "chan<- " + exprToString(e.Value)
|
||||
case ast.RECV:
|
||||
return "<-chan " + exprToString(e.Value)
|
||||
default:
|
||||
return "chan " + exprToString(e.Value)
|
||||
}
|
||||
case *ast.BasicLit:
|
||||
return e.Value
|
||||
default:
|
||||
return "unknown"
|
||||
}
|
||||
}
|
||||
|
||||
func funcSignature(fn *ast.FuncType) string {
|
||||
params := fieldListToString(fn.Params)
|
||||
results := fieldListToString(fn.Results)
|
||||
|
||||
if results == "" {
|
||||
return "func(" + params + ")"
|
||||
}
|
||||
return "func(" + params + ") " + results
|
||||
}
|
||||
|
||||
func fieldListToString(fl *ast.FieldList) string {
|
||||
if fl == nil || len(fl.List) == 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
var parts []string
|
||||
for _, field := range fl.List {
|
||||
fieldType := exprToString(field.Type)
|
||||
if len(field.Names) == 0 {
|
||||
parts = append(parts, fieldType)
|
||||
} else {
|
||||
for _, name := range field.Names {
|
||||
parts = append(parts, name.Name+" "+fieldType)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(parts) == 1 && !strings.Contains(parts[0], " ") {
|
||||
return parts[0]
|
||||
}
|
||||
return "(" + strings.Join(parts, ", ") + ")"
|
||||
}
|
||||
|
||||
func extractContext(src []byte, pos token.Position) string {
|
||||
lines := strings.Split(string(src), "\n")
|
||||
if pos.Line <= 0 || pos.Line > len(lines) {
|
||||
return ""
|
||||
}
|
||||
|
||||
start := pos.Line - 2
|
||||
if start < 0 {
|
||||
start = 0
|
||||
}
|
||||
end := pos.Line + 1
|
||||
if end > len(lines) {
|
||||
end = len(lines)
|
||||
}
|
||||
|
||||
context := strings.Join(lines[start:end], "\n")
|
||||
return strings.TrimSpace(context)
|
||||
}
|
||||
|
||||
func extractDocString(doc *ast.CommentGroup) string {
|
||||
if doc == nil {
|
||||
return ""
|
||||
}
|
||||
var text strings.Builder
|
||||
for _, comment := range doc.List {
|
||||
text.WriteString(strings.TrimPrefix(comment.Text, "//"))
|
||||
text.WriteString(" ")
|
||||
}
|
||||
return strings.TrimSpace(text.String())
|
||||
}
|
||||
|
||||
func isErrorCheck(ifStmt *ast.IfStmt) bool {
|
||||
// Check if this is an "if err != nil" pattern
|
||||
if binExpr, ok := ifStmt.Cond.(*ast.BinaryExpr); ok {
|
||||
if binExpr.Op == token.NEQ {
|
||||
if ident, ok := binExpr.X.(*ast.Ident); ok && (ident.Name == "err" || strings.Contains(ident.Name, "error")) {
|
||||
if ident2, ok := binExpr.Y.(*ast.Ident); ok && ident2.Name == "nil" {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
45
tool_analyze_architecture.go
Normal file
45
tool_analyze_architecture.go
Normal file
@@ -0,0 +1,45 @@
|
||||
package main
|
||||
|
||||
// Architecture analysis types

// ArchitectureInfo is the result of analyzeArchitecture: a layered view
// of the project plus any detected layering violations.
type ArchitectureInfo struct {
	Layers      []LayerInfo      `json:"layers"`
	Violations  []LayerViolation `json:"violations,omitempty"`  // not populated by analyzeArchitecture yet
	Suggestions []string         `json:"suggestions,omitempty"` // not populated by analyzeArchitecture yet
}

// LayerInfo describes one architectural layer; analyzeArchitecture
// currently emits one layer per package.
type LayerInfo struct {
	Name         string   `json:"name"`
	Packages     []string `json:"packages"`     // import paths grouped into this layer
	Dependencies []string `json:"dependencies"` // imports of the layer's packages
}

// LayerViolation records a dependency edge that breaks the intended
// layering, from one layer to another.
type LayerViolation struct {
	From      string   `json:"from"`
	To        string   `json:"to"`
	Violation string   `json:"violation"`
	Position  Position `json:"position"`
}
|
||||
|
||||
func analyzeArchitecture(dir string) (*ArchitectureInfo, error) {
|
||||
// Simplified architecture analysis
|
||||
packages, err := listPackages(dir, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
arch := &ArchitectureInfo{
|
||||
Layers: []LayerInfo{},
|
||||
}
|
||||
|
||||
// Detect common Go project structure
|
||||
for _, pkg := range packages {
|
||||
layer := LayerInfo{
|
||||
Name: pkg.Name,
|
||||
Packages: []string{pkg.ImportPath},
|
||||
Dependencies: pkg.Imports,
|
||||
}
|
||||
arch.Layers = append(arch.Layers, layer)
|
||||
}
|
||||
|
||||
return arch, nil
|
||||
}
|
||||
39
tool_analyze_coupling.go
Normal file
39
tool_analyze_coupling.go
Normal file
@@ -0,0 +1,39 @@
|
||||
package main
|
||||
|
||||
// Coupling analysis types

// CouplingInfo holds afferent/efferent coupling metrics for one package.
type CouplingInfo struct {
	Package      string   `json:"package"`
	Afferent     int      `json:"afferent"`    // Ca: packages that depend on this one
	Efferent     int      `json:"efferent"`    // Ce: packages this one depends on
	Instability  float64  `json:"instability"` // I = Ce / (Ca + Ce); 1 = fully unstable
	Dependencies []string `json:"dependencies"`
	Dependents   []string `json:"dependents"`
	Suggestions  []string `json:"suggestions,omitempty"` // not populated by analyzeCoupling yet
}
|
||||
|
||||
func analyzeCoupling(dir string) ([]CouplingInfo, error) {
|
||||
var coupling []CouplingInfo
|
||||
|
||||
// This is a simplified implementation
|
||||
packages, err := listPackages(dir, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, pkg := range packages {
|
||||
info := CouplingInfo{
|
||||
Package: pkg.Name,
|
||||
Dependencies: pkg.Imports,
|
||||
Efferent: len(pkg.Imports),
|
||||
}
|
||||
|
||||
// Calculate instability (Ce / (Ca + Ce))
|
||||
if info.Afferent+info.Efferent > 0 {
|
||||
info.Instability = float64(info.Efferent) / float64(info.Afferent+info.Efferent)
|
||||
}
|
||||
|
||||
coupling = append(coupling, info)
|
||||
}
|
||||
|
||||
return coupling, nil
|
||||
}
|
||||
132
tool_analyze_dependencies.go
Normal file
132
tool_analyze_dependencies.go
Normal file
@@ -0,0 +1,132 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Dependency analysis types

// DependencyInfo describes one package directory's import relationships.
type DependencyInfo struct {
	Package      string     `json:"package"`
	Dir          string     `json:"dir"`
	Dependencies []string   `json:"dependencies"`         // unique import paths used by files in Dir
	Dependents   []string   `json:"dependents,omitempty"` // names of in-repo packages importing this one
	Cycles       [][]string `json:"cycles,omitempty"`     // import cycles found by findCycles
}
|
||||
|
||||
func analyzeDependencies(dir string) ([]DependencyInfo, error) {
|
||||
depMap := make(map[string]*DependencyInfo)
|
||||
|
||||
// First pass: collect all packages and their imports
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
pkgDir := filepath.Dir(path)
|
||||
if _, exists := depMap[pkgDir]; !exists {
|
||||
depMap[pkgDir] = &DependencyInfo{
|
||||
Package: file.Name.Name,
|
||||
Dir: pkgDir,
|
||||
Dependencies: []string{},
|
||||
}
|
||||
}
|
||||
|
||||
// Add imports
|
||||
for _, imp := range file.Imports {
|
||||
importPath := strings.Trim(imp.Path.Value, `"`)
|
||||
if !contains(depMap[pkgDir].Dependencies, importPath) {
|
||||
depMap[pkgDir].Dependencies = append(depMap[pkgDir].Dependencies, importPath)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Build dependency graph and find cycles
|
||||
var deps []DependencyInfo
|
||||
for _, dep := range depMap {
|
||||
// Find internal dependencies
|
||||
for _, imp := range dep.Dependencies {
|
||||
// Check if this is an internal package
|
||||
for otherDir, otherDep := range depMap {
|
||||
if strings.HasSuffix(imp, otherDep.Package) && otherDir != dep.Dir {
|
||||
otherDep.Dependents = append(otherDep.Dependents, dep.Package)
|
||||
}
|
||||
}
|
||||
}
|
||||
deps = append(deps, *dep)
|
||||
}
|
||||
|
||||
// Simple cycle detection (could be enhanced)
|
||||
for i := range deps {
|
||||
deps[i].Cycles = findCycles(&deps[i], depMap)
|
||||
}
|
||||
|
||||
return deps, nil
|
||||
}
|
||||
|
||||
// contains reports whether str occurs in slice.
func contains(slice []string, str string) bool {
	for i := range slice {
		if slice[i] == str {
			return true
		}
	}
	return false
}
|
||||
|
||||
// findCycles runs a DFS from dep's package over the graph in depMap and
// returns the import cycles reachable from it. Edges are resolved by
// suffix-matching import paths against package names, which is a
// heuristic and can create false edges between same-named packages.
// The DFS stops at the first cycle found along each branch.
func findCycles(dep *DependencyInfo, depMap map[string]*DependencyInfo) [][]string {
	// Simple DFS-based cycle detection
	var cycles [][]string
	visited := make(map[string]bool)  // packages ever entered
	recStack := make(map[string]bool) // packages currently on the DFS stack
	path := []string{}                // current root-to-node package trail

	var dfs func(pkg string) bool
	dfs = func(pkg string) bool {
		visited[pkg] = true
		recStack[pkg] = true
		path = append(path, pkg)

		// Find dependencies for this package
		for _, d := range depMap {
			if d.Package == pkg {
				for _, imp := range d.Dependencies {
					for _, otherDep := range depMap {
						if strings.HasSuffix(imp, otherDep.Package) {
							if !visited[otherDep.Package] {
								if dfs(otherDep.Package) {
									// Propagate: a cycle was already recorded deeper in.
									return true
								}
							} else if recStack[otherDep.Package] {
								// Found a cycle
								cycleStart := -1
								for i, p := range path {
									if p == otherDep.Package {
										cycleStart = i
										break
									}
								}
								if cycleStart >= 0 {
									// Copy the segment: path's backing array is
									// mutated as the recursion unwinds.
									cycle := append([]string{}, path[cycleStart:]...)
									cycles = append(cycles, cycle)
								}
								return true
							}
						}
					}
				}
				// NOTE(review): only the first depMap entry whose Package
				// matches pkg is expanded — directories that share a
				// package name are skipped by this break.
				break
			}
		}

		path = path[:len(path)-1]
		recStack[pkg] = false
		return false
	}

	dfs(dep.Package)
	return cycles
}
|
||||
94
tool_analyze_embedding.go
Normal file
94
tool_analyze_embedding.go
Normal file
@@ -0,0 +1,94 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// Embedding analysis types

// EmbeddingInfo lists, per file, the struct and interface declarations
// that embed other types.
type EmbeddingInfo struct {
	File       string               `json:"file"`
	Structs    []StructEmbedding    `json:"structs,omitempty"`
	Interfaces []InterfaceEmbedding `json:"interfaces,omitempty"`
}

// StructEmbedding records a struct's anonymous (embedded) fields.
type StructEmbedding struct {
	Name     string   `json:"name"`
	Embedded []string `json:"embedded"`
	Methods  []string `json:"promoted_methods"` // not populated by analyzeEmbedding yet
	Position Position `json:"position"`
}

// InterfaceEmbedding records an interface's embedded interfaces and the
// method names it declares directly.
type InterfaceEmbedding struct {
	Name     string   `json:"name"`
	Embedded []string `json:"embedded"`
	Methods  []string `json:"methods"`
	Position Position `json:"position"`
}
|
||||
|
||||
func analyzeEmbedding(dir string) ([]EmbeddingInfo, error) {
|
||||
var embedding []EmbeddingInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
info := EmbeddingInfo{
|
||||
File: path,
|
||||
}
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch decl := n.(type) {
|
||||
case *ast.GenDecl:
|
||||
for _, spec := range decl.Specs {
|
||||
if ts, ok := spec.(*ast.TypeSpec); ok {
|
||||
if st, ok := ts.Type.(*ast.StructType); ok {
|
||||
pos := fset.Position(ts.Pos())
|
||||
structEmb := StructEmbedding{
|
||||
Name: ts.Name.Name,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
|
||||
for _, field := range st.Fields.List {
|
||||
if len(field.Names) == 0 {
|
||||
structEmb.Embedded = append(structEmb.Embedded, exprToString(field.Type))
|
||||
}
|
||||
}
|
||||
|
||||
if len(structEmb.Embedded) > 0 {
|
||||
info.Structs = append(info.Structs, structEmb)
|
||||
}
|
||||
}
|
||||
|
||||
if it, ok := ts.Type.(*ast.InterfaceType); ok {
|
||||
pos := fset.Position(ts.Pos())
|
||||
ifaceEmb := InterfaceEmbedding{
|
||||
Name: ts.Name.Name,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
|
||||
for _, method := range it.Methods.List {
|
||||
if len(method.Names) == 0 {
|
||||
ifaceEmb.Embedded = append(ifaceEmb.Embedded, exprToString(method.Type))
|
||||
} else {
|
||||
for _, name := range method.Names {
|
||||
ifaceEmb.Methods = append(ifaceEmb.Methods, name.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(ifaceEmb.Embedded) > 0 || len(ifaceEmb.Methods) > 0 {
|
||||
info.Interfaces = append(info.Interfaces, ifaceEmb)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
if len(info.Structs) > 0 || len(info.Interfaces) > 0 {
|
||||
embedding = append(embedding, info)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return embedding, err
|
||||
}
|
||||
78
tool_analyze_go_idioms.go
Normal file
78
tool_analyze_go_idioms.go
Normal file
@@ -0,0 +1,78 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Go idioms types

// IdiomsInfo collects per-file idiom findings: hard violations and
// softer suggestions.
type IdiomsInfo struct {
	File        string      `json:"file"`
	Violations  []IdiomItem `json:"violations,omitempty"`
	Suggestions []IdiomItem `json:"suggestions,omitempty"`
}

// IdiomItem is a single idiom finding with a human-readable description
// and a suggested fix.
type IdiomItem struct {
	Type        string   `json:"type"` // e.g. "error_handling", "receiver_naming"
	Description string   `json:"description"`
	Suggestion  string   `json:"suggestion"`
	Position    Position `json:"position"`
}
|
||||
|
||||
func analyzeGoIdioms(dir string) ([]IdiomsInfo, error) {
|
||||
var idioms []IdiomsInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
info := IdiomsInfo{
|
||||
File: path,
|
||||
}
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
// Check for proper error handling
|
||||
if ifStmt, ok := n.(*ast.IfStmt); ok {
|
||||
if !isErrorCheck(ifStmt) {
|
||||
// Look for other patterns that might be non-idiomatic
|
||||
pos := fset.Position(ifStmt.Pos())
|
||||
info.Suggestions = append(info.Suggestions, IdiomItem{
|
||||
Type: "error_handling",
|
||||
Description: "Consider Go error handling patterns",
|
||||
Suggestion: "Use 'if err != nil' pattern",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Check for receiver naming
|
||||
if fn, ok := n.(*ast.FuncDecl); ok && fn.Recv != nil {
|
||||
for _, recv := range fn.Recv.List {
|
||||
if len(recv.Names) > 0 {
|
||||
name := recv.Names[0].Name
|
||||
if len(name) > 1 && !isValidReceiverName(name) {
|
||||
pos := fset.Position(recv.Pos())
|
||||
info.Violations = append(info.Violations, IdiomItem{
|
||||
Type: "receiver_naming",
|
||||
Description: "Receiver name should be short abbreviation",
|
||||
Suggestion: "Use 1-2 character receiver names",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
if len(info.Violations) > 0 || len(info.Suggestions) > 0 {
|
||||
idioms = append(idioms, info)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return idioms, err
|
||||
}
|
||||
|
||||
// isValidReceiverName reports whether name follows the Go receiver
// convention: at most two characters, all lowercase.
func isValidReceiverName(name string) bool {
	if len(name) > 2 {
		return false
	}
	return name == strings.ToLower(name)
}
|
||||
119
tool_analyze_test_quality.go
Normal file
119
tool_analyze_test_quality.go
Normal file
@@ -0,0 +1,119 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Test quality types

// TestQualityInfo summarizes one _test.go file: aggregate metrics plus
// individual issues found in its test functions.
type TestQualityInfo struct {
	File        string      `json:"file"`
	TestMetrics TestMetrics `json:"metrics"`
	Issues      []TestIssue `json:"issues,omitempty"`
	Suggestions []string    `json:"suggestions,omitempty"` // not populated by analyzeTestQuality yet
}

// TestMetrics counts the kinds of test functions in a file.
type TestMetrics struct {
	TotalTests  int     `json:"total_tests"`
	TableDriven int     `json:"table_driven"` // tests matching the table-driven heuristic
	Benchmarks  int     `json:"benchmarks"`
	Examples    int     `json:"examples"`
	Coverage    float64 `json:"estimated_coverage"` // not populated by analyzeTestQuality yet
}

// TestIssue is one problem found in a test function.
type TestIssue struct {
	Type        string   `json:"type"` // e.g. "weak_assertions"
	Description string   `json:"description"`
	Severity    string   `json:"severity"` // e.g. "medium"
	Position    Position `json:"position"`
}
|
||||
|
||||
func analyzeTestQuality(dir string) ([]TestQualityInfo, error) {
|
||||
var testQuality []TestQualityInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
if !strings.HasSuffix(path, "_test.go") {
|
||||
return nil
|
||||
}
|
||||
|
||||
info := TestQualityInfo{
|
||||
File: path,
|
||||
}
|
||||
|
||||
for _, decl := range file.Decls {
|
||||
if fn, ok := decl.(*ast.FuncDecl); ok {
|
||||
name := fn.Name.Name
|
||||
if strings.HasPrefix(name, "Test") {
|
||||
info.TestMetrics.TotalTests++
|
||||
|
||||
// Check for table-driven tests
|
||||
if hasTableDrivenPattern(fn) {
|
||||
info.TestMetrics.TableDriven++
|
||||
}
|
||||
|
||||
// Check for proper assertions
|
||||
if !hasProperAssertions(fn) {
|
||||
pos := fset.Position(fn.Pos())
|
||||
info.Issues = append(info.Issues, TestIssue{
|
||||
Type: "weak_assertions",
|
||||
Description: "Test lacks proper assertions",
|
||||
Severity: "medium",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
} else if strings.HasPrefix(name, "Benchmark") {
|
||||
info.TestMetrics.Benchmarks++
|
||||
} else if strings.HasPrefix(name, "Example") {
|
||||
info.TestMetrics.Examples++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if info.TestMetrics.TotalTests > 0 {
|
||||
testQuality = append(testQuality, info)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return testQuality, err
|
||||
}
|
||||
|
||||
func hasTableDrivenPattern(fn *ast.FuncDecl) bool {
|
||||
// Look for table-driven test patterns
|
||||
found := false
|
||||
ast.Inspect(fn, func(n ast.Node) bool {
|
||||
if genDecl, ok := n.(*ast.GenDecl); ok {
|
||||
for _, spec := range genDecl.Specs {
|
||||
if valueSpec, ok := spec.(*ast.ValueSpec); ok {
|
||||
for _, name := range valueSpec.Names {
|
||||
if strings.Contains(name.Name, "test") || strings.Contains(name.Name, "case") {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
return found
|
||||
}
|
||||
|
||||
func hasProperAssertions(fn *ast.FuncDecl) bool {
|
||||
// Look for testing.T calls
|
||||
found := false
|
||||
ast.Inspect(fn, func(n ast.Node) bool {
|
||||
if callExpr, ok := n.(*ast.CallExpr); ok {
|
||||
if selExpr, ok := callExpr.Fun.(*ast.SelectorExpr); ok {
|
||||
if ident, ok := selExpr.X.(*ast.Ident); ok && ident.Name == "t" {
|
||||
if selExpr.Sel.Name == "Error" || selExpr.Sel.Name == "Fatal" ||
|
||||
selExpr.Sel.Name == "Fail" {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
return found
|
||||
}
|
||||
122
tool_analyze_tests.go
Normal file
122
tool_analyze_tests.go
Normal file
@@ -0,0 +1,122 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Test analysis types

// TestAnalysis is the result of analyzeTests: discovered test files,
// the exported functions found, and a coverage summary.
type TestAnalysis struct {
	TestFiles         []TestFile     `json:"test_files"`
	ExportedFunctions []ExportedFunc `json:"exported_functions"`
	TestCoverage      TestCoverage   `json:"coverage_summary"`
}

// TestFile lists the test, benchmark, and example functions declared in
// one _test.go file.
type TestFile struct {
	File       string   `json:"file"`
	Package    string   `json:"package"`
	Tests      []string `json:"tests"`
	Benchmarks []string `json:"benchmarks,omitempty"`
	Examples   []string `json:"examples,omitempty"`
}

// ExportedFunc is an exported function and whether a matching TestXxx
// was found for it (by name, not by actual call coverage).
type ExportedFunc struct {
	Name     string   `json:"name"`
	Package  string   `json:"package"`
	Tested   bool     `json:"tested"`
	Position Position `json:"position"`
}

// TestCoverage summarizes the name-based coverage heuristic: how many
// exported functions have a TestXxx counterpart.
type TestCoverage struct {
	TotalExported int     `json:"total_exported"`
	TotalTested   int     `json:"total_tested"`
	Percentage    float64 `json:"percentage"` // 0-100; stays 0 when no exported functions exist
}
|
||||
|
||||
// analyzeTests walks all Go files under dir and builds a TestAnalysis:
// which test files exist (tests/benchmarks/examples classified by name
// prefix), which exported functions exist in non-test files, and a
// coverage summary derived from the naming heuristic "TestFoo tests Foo".
//
// The "tested" determination is purely name-based; a test that exercises
// a function without following the Test<FuncName> convention is not
// counted. NOTE: ExportedFunctions is filled from map iteration, so its
// order varies between runs.
func analyzeTests(dir string) (*TestAnalysis, error) {
	analysis := &TestAnalysis{
		TestFiles:         []TestFile{},
		ExportedFunctions: []ExportedFunc{},
	}

	// Collect all exported functions, keyed "package.FuncName" so test
	// names can be matched back to them after the walk.
	exportedFuncs := make(map[string]*ExportedFunc)

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		if strings.HasSuffix(path, "_test.go") {
			// Process test files: classify top-level functions by prefix.
			testFile := TestFile{
				File:    path,
				Package: file.Name.Name,
			}

			for _, decl := range file.Decls {
				if fn, ok := decl.(*ast.FuncDecl); ok {
					name := fn.Name.Name
					if strings.HasPrefix(name, "Test") {
						testFile.Tests = append(testFile.Tests, name)
					} else if strings.HasPrefix(name, "Benchmark") {
						testFile.Benchmarks = append(testFile.Benchmarks, name)
					} else if strings.HasPrefix(name, "Example") {
						testFile.Examples = append(testFile.Examples, name)
					}
				}
			}

			// Only record test files that actually contain something.
			if len(testFile.Tests) > 0 || len(testFile.Benchmarks) > 0 || len(testFile.Examples) > 0 {
				analysis.TestFiles = append(analysis.TestFiles, testFile)
			}
		} else {
			// Collect exported functions from production files.
			for _, decl := range file.Decls {
				if fn, ok := decl.(*ast.FuncDecl); ok && ast.IsExported(fn.Name.Name) {
					key := file.Name.Name + "." + fn.Name.Name
					pos := fset.Position(fn.Pos())
					exportedFuncs[key] = &ExportedFunc{
						Name:     fn.Name.Name,
						Package:  file.Name.Name,
						Tested:   false,
						Position: newPosition(pos),
					}
				}
			}
		}
		return nil
	})

	if err != nil {
		return nil, err
	}

	// Check which functions are tested.
	for _, testFile := range analysis.TestFiles {
		for _, testName := range testFile.Tests {
			// Simple heuristic: TestFunctionName tests FunctionName.
			funcName := strings.TrimPrefix(testName, "Test")
			key := testFile.Package + "." + funcName
			if fn, exists := exportedFuncs[key]; exists {
				fn.Tested = true
			}
		}
	}

	// Convert map to slice and calculate coverage.
	tested := 0
	for _, fn := range exportedFuncs {
		analysis.ExportedFunctions = append(analysis.ExportedFunctions, *fn)
		if fn.Tested {
			tested++
		}
	}

	analysis.TestCoverage = TestCoverage{
		TotalExported: len(exportedFuncs),
		TotalTested:   tested,
	}
	// Guard against division by zero when no exported functions exist.
	if len(exportedFuncs) > 0 {
		analysis.TestCoverage.Percentage = float64(tested) / float64(len(exportedFuncs)) * 100
	}

	return analysis, nil
}
|
||||
131
tool_extract_api.go
Normal file
131
tool_extract_api.go
Normal file
@@ -0,0 +1,131 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// API analysis types

// ApiInfo is the exported surface collected from one file by extractApi:
// Package is the file's package name; only files declaring at least one
// exported symbol produce an entry.
type ApiInfo struct {
	Package   string        `json:"package"`
	Functions []ApiFunction `json:"functions"`
	Types     []ApiType     `json:"types"`
	Constants []ApiConstant `json:"constants"`
	Variables []ApiVariable `json:"variables"`
}

// ApiFunction is one exported function with its rendered signature and
// doc comment.
type ApiFunction struct {
	Name      string   `json:"name"`
	Signature string   `json:"signature"`
	Doc       string   `json:"doc,omitempty"`
	Position  Position `json:"position"`
}

// ApiType is one exported type; Kind is "struct", "interface", or the
// generic "type" for anything else.
type ApiType struct {
	Name     string   `json:"name"`
	Kind     string   `json:"kind"`
	Doc      string   `json:"doc,omitempty"`
	Position Position `json:"position"`
}

// ApiConstant is one exported constant; Value is the source expression
// of its first initializer, when present.
type ApiConstant struct {
	Name     string   `json:"name"`
	Type     string   `json:"type"`
	Value    string   `json:"value,omitempty"`
	Doc      string   `json:"doc,omitempty"`
	Position Position `json:"position"`
}

// ApiVariable is one exported package-level variable.
type ApiVariable struct {
	Name     string   `json:"name"`
	Type     string   `json:"type"`
	Doc      string   `json:"doc,omitempty"`
	Position Position `json:"position"`
}
|
||||
|
||||
// extractApi walks non-test Go files under dir and returns, per file,
// the exported functions, types, constants and variables it declares.
// Doc comments are taken from the declaration (for grouped const/var/type
// declarations, the group's doc comment is used for every spec in it).
// Files with no exported symbols are omitted from the result.
func extractApi(dir string) ([]ApiInfo, error) {
	var apis []ApiInfo

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		// Skip test files entirely; they are not part of the public API.
		if strings.HasSuffix(path, "_test.go") {
			return nil
		}

		api := ApiInfo{
			Package: file.Name.Name,
		}

		for _, decl := range file.Decls {
			switch d := decl.(type) {
			case *ast.FuncDecl:
				// Exported functions (including methods, which also appear
				// as FuncDecls).
				if ast.IsExported(d.Name.Name) {
					pos := fset.Position(d.Pos())
					api.Functions = append(api.Functions, ApiFunction{
						Name:      d.Name.Name,
						Signature: funcSignature(d.Type),
						Doc:       extractDocString(d.Doc),
						Position:  newPosition(pos),
					})
				}

			case *ast.GenDecl:
				for _, spec := range d.Specs {
					switch s := spec.(type) {
					case *ast.TypeSpec:
						if ast.IsExported(s.Name.Name) {
							pos := fset.Position(s.Pos())
							// Classify the type; anything that is neither a
							// struct nor an interface stays plain "type".
							kind := "type"
							switch s.Type.(type) {
							case *ast.StructType:
								kind = "struct"
							case *ast.InterfaceType:
								kind = "interface"
							}
							api.Types = append(api.Types, ApiType{
								Name:     s.Name.Name,
								Kind:     kind,
								Doc:      extractDocString(d.Doc),
								Position: newPosition(pos),
							})
						}

					case *ast.ValueSpec:
						for _, name := range s.Names {
							if ast.IsExported(name.Name) {
								pos := fset.Position(name.Pos())
								// The GenDecl token distinguishes const from var.
								if d.Tok == token.CONST {
									// Only the first initializer expression is
									// captured, even for multi-name specs.
									value := ""
									if len(s.Values) > 0 {
										value = exprToString(s.Values[0])
									}
									api.Constants = append(api.Constants, ApiConstant{
										Name:     name.Name,
										Type:     exprToString(s.Type),
										Value:    value,
										Doc:      extractDocString(d.Doc),
										Position: newPosition(pos),
									})
								} else {
									api.Variables = append(api.Variables, ApiVariable{
										Name:     name.Name,
										Type:     exprToString(s.Type),
										Doc:      extractDocString(d.Doc),
										Position: newPosition(pos),
									})
								}
							}
						}
					}
				}
			}
		}

		// Record only files that exported something.
		if len(api.Functions) > 0 || len(api.Types) > 0 || len(api.Constants) > 0 || len(api.Variables) > 0 {
			apis = append(apis, api)
		}
		return nil
	})

	return apis, err
}
|
||||
123
tool_extract_interfaces.go
Normal file
123
tool_extract_interfaces.go
Normal file
@@ -0,0 +1,123 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// Interface analysis types

// InterfaceInfo describes one interface declaration, its method set, and
// (only when a specific interface was requested from extractInterfaces)
// the concrete types whose method sets cover it.
type InterfaceInfo struct {
	Name            string               `json:"name"`
	Package         string               `json:"package"`
	Position        Position             `json:"position"`
	Methods         []MethodInfo         `json:"methods"`
	Implementations []ImplementationType `json:"implementations,omitempty"`
}

// ImplementationType identifies a concrete type that declares every
// method named by the interface. Matching is by method name only, not
// signature (see implementsInterface).
type ImplementationType struct {
	Type     string   `json:"type"`
	Package  string   `json:"package"`
	Position Position `json:"position"`
}
|
||||
|
||||
// extractInterfaces collects interface declarations under dir. With an
// empty interfaceName every interface is returned; with a specific name,
// only that interface is collected and a second pass additionally records
// concrete types whose method sets cover it (matched by method name only,
// via implementsInterface).
//
// NOTE: the result order follows map iteration and is not deterministic,
// and interfaces are keyed by bare name, so two same-named interfaces in
// different packages collide (the later one wins).
func extractInterfaces(dir string, interfaceName string) ([]InterfaceInfo, error) {
	var interfaces []InterfaceInfo
	interfaceMap := make(map[string]*InterfaceInfo)

	// First pass: collect all interfaces (optionally filtered by name).
	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		ast.Inspect(file, func(n ast.Node) bool {
			if genDecl, ok := n.(*ast.GenDecl); ok {
				for _, spec := range genDecl.Specs {
					if typeSpec, ok := spec.(*ast.TypeSpec); ok {
						if iface, ok := typeSpec.Type.(*ast.InterfaceType); ok {
							name := typeSpec.Name.Name
							if interfaceName == "" || name == interfaceName {
								pos := fset.Position(typeSpec.Pos())
								info := &InterfaceInfo{
									Name:     name,
									Package:  file.Name.Name,
									Position: newPosition(pos),
									Methods:  extractInterfaceMethods(iface, fset),
								}
								interfaceMap[name] = info
							}
						}
					}
				}
			}
			return true
		})
		return nil
	})

	if err != nil {
		return nil, err
	}

	// Second pass: find implementations — only when a single interface
	// was requested, and only if pass one actually found it.
	if interfaceName != "" {
		iface, exists := interfaceMap[interfaceName]
		if exists {
			err = walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
				// Collect all types with methods declared in this file:
				// receiver type name -> method names.
				types := make(map[string][]string)

				for _, decl := range file.Decls {
					if fn, ok := decl.(*ast.FuncDecl); ok && fn.Recv != nil {
						for _, recv := range fn.Recv.List {
							typeName := getTypeName(recv.Type)
							types[typeName] = append(types[typeName], fn.Name.Name)
						}
					}
				}

				// Check if any type implements the interface.
				for typeName, methods := range types {
					if implementsInterface(methods, iface.Methods) {
						// Find the type declaration to record its position.
						ast.Inspect(file, func(n ast.Node) bool {
							if genDecl, ok := n.(*ast.GenDecl); ok {
								for _, spec := range genDecl.Specs {
									if typeSpec, ok := spec.(*ast.TypeSpec); ok && typeSpec.Name.Name == typeName {
										pos := fset.Position(typeSpec.Pos())
										iface.Implementations = append(iface.Implementations, ImplementationType{
											Type:     typeName,
											Package:  file.Name.Name,
											Position: newPosition(pos),
										})
									}
								}
							}
							return true
						})
					}
				}
				return nil
			})
		}
	}

	// Convert map to slice.
	for _, iface := range interfaceMap {
		interfaces = append(interfaces, *iface)
	}

	return interfaces, err
}
|
||||
|
||||
func implementsInterface(methods []string, interfaceMethods []MethodInfo) bool {
|
||||
for _, im := range interfaceMethods {
|
||||
found := false
|
||||
for _, m := range methods {
|
||||
if m == im.Name {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
98
tool_find_comments.go
Normal file
98
tool_find_comments.go
Normal file
@@ -0,0 +1,98 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
// Comment analysis types

// CommentInfo groups the comment findings for one file: TODO-style
// markers and exported declarations that lack a doc comment.
type CommentInfo struct {
	File         string        `json:"file"`
	TODOs        []CommentItem `json:"todos,omitempty"`
	Undocumented []CommentItem `json:"undocumented,omitempty"`
}

// CommentItem is a single finding. For TODO findings, Comment holds the
// comment text and Name is empty; for undocumented symbols, Name holds
// the symbol and Comment is empty. Type is "todo", "function", "type",
// or "value".
type CommentItem struct {
	Name     string   `json:"name"`
	Comment  string   `json:"comment,omitempty"`
	Type     string   `json:"type"`
	Position Position `json:"position"`
}
|
||||
|
||||
func findComments(dir string, commentType string) ([]CommentInfo, error) {
|
||||
var comments []CommentInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
info := CommentInfo{
|
||||
File: path,
|
||||
}
|
||||
|
||||
// Find TODOs in comments
|
||||
if commentType == "todo" || commentType == "all" {
|
||||
todoRegex := regexp.MustCompile(`(?i)\b(todo|fixme|hack|bug|xxx)\b`)
|
||||
for _, cg := range file.Comments {
|
||||
for _, c := range cg.List {
|
||||
if todoRegex.MatchString(c.Text) {
|
||||
pos := fset.Position(c.Pos())
|
||||
info.TODOs = append(info.TODOs, CommentItem{
|
||||
Comment: c.Text,
|
||||
Type: "todo",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find undocumented exported symbols
|
||||
if commentType == "undocumented" || commentType == "all" {
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch x := n.(type) {
|
||||
case *ast.FuncDecl:
|
||||
if ast.IsExported(x.Name.Name) && x.Doc == nil {
|
||||
pos := fset.Position(x.Pos())
|
||||
info.Undocumented = append(info.Undocumented, CommentItem{
|
||||
Name: x.Name.Name,
|
||||
Type: "function",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
case *ast.GenDecl:
|
||||
for _, spec := range x.Specs {
|
||||
switch s := spec.(type) {
|
||||
case *ast.TypeSpec:
|
||||
if ast.IsExported(s.Name.Name) && x.Doc == nil && s.Doc == nil {
|
||||
pos := fset.Position(s.Pos())
|
||||
info.Undocumented = append(info.Undocumented, CommentItem{
|
||||
Name: s.Name.Name,
|
||||
Type: "type",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
case *ast.ValueSpec:
|
||||
for _, name := range s.Names {
|
||||
if ast.IsExported(name.Name) && x.Doc == nil && s.Doc == nil {
|
||||
pos := fset.Position(name.Pos())
|
||||
info.Undocumented = append(info.Undocumented, CommentItem{
|
||||
Name: name.Name,
|
||||
Type: "value",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
if len(info.TODOs) > 0 || len(info.Undocumented) > 0 {
|
||||
comments = append(comments, info)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return comments, err
|
||||
}
|
||||
70
tool_find_context_usage.go
Normal file
70
tool_find_context_usage.go
Normal file
@@ -0,0 +1,70 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Context usage types

// ContextInfo groups context.Context findings for one file. Only
// MissingContext is populated by findContextUsage; ProperUsage and
// ImproperUsage are declared but never filled in this version.
type ContextInfo struct {
	File           string         `json:"file"`
	MissingContext []ContextUsage `json:"missing_context,omitempty"`
	ProperUsage    []ContextUsage `json:"proper_usage,omitempty"`
	ImproperUsage  []ContextUsage `json:"improper_usage,omitempty"`
}

// ContextUsage is one finding about a function's (non-)use of
// context.Context.
type ContextUsage struct {
	Function    string   `json:"function"`
	Type        string   `json:"type"`
	Description string   `json:"description"`
	Position    Position `json:"position"`
}
|
||||
|
||||
// findContextUsage flags functions that look like they do I/O (per the
// shouldHaveContext name heuristic) but take no context.Context
// parameter. The parameter type is matched by its rendered source text,
// so only the literal "context.Context" spelling is recognized (an
// aliased import would be missed).
func findContextUsage(dir string) ([]ContextInfo, error) {
	var contextInfo []ContextInfo

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		info := ContextInfo{
			File: path,
		}

		ast.Inspect(file, func(n ast.Node) bool {
			if fn, ok := n.(*ast.FuncDecl); ok && fn.Type.Params != nil {
				// Does any parameter have the (textual) type context.Context?
				hasContext := false
				for _, param := range fn.Type.Params.List {
					if exprToString(param.Type) == "context.Context" {
						hasContext = true
						break
					}
				}

				// Check if the function should have context but doesn't.
				if !hasContext && shouldHaveContext(fn) {
					pos := fset.Position(fn.Pos())
					info.MissingContext = append(info.MissingContext, ContextUsage{
						Function:    fn.Name.Name,
						Type:        "missing",
						Description: "Function should accept context.Context",
						Position:    newPosition(pos),
					})
				}
			}
			return true
		})

		// Record the file only if something was found. (ProperUsage and
		// ImproperUsage are never populated here, but checking them keeps
		// this condition correct if they ever are.)
		if len(info.MissingContext) > 0 || len(info.ProperUsage) > 0 || len(info.ImproperUsage) > 0 {
			contextInfo = append(contextInfo, info)
		}
		return nil
	})

	return contextInfo, err
}
|
||||
|
||||
func shouldHaveContext(fn *ast.FuncDecl) bool {
|
||||
// Simple heuristic: functions that might do I/O
|
||||
name := strings.ToLower(fn.Name.Name)
|
||||
return strings.Contains(name, "get") || strings.Contains(name, "fetch") ||
|
||||
strings.Contains(name, "load") || strings.Contains(name, "save")
|
||||
}
|
||||
107
tool_find_dead_code.go
Normal file
107
tool_find_dead_code.go
Normal file
@@ -0,0 +1,107 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Dead code analysis types

// DeadCodeInfo groups likely-dead-code findings for one file.
type DeadCodeInfo struct {
	File            string         `json:"file"`
	UnusedVars      []UnusedItem   `json:"unused_vars,omitempty"`
	UnreachableCode []CodeLocation `json:"unreachable_code,omitempty"`
	DeadBranches    []CodeLocation `json:"dead_branches,omitempty"`
}

// UnusedItem is a declared-but-never-referenced symbol (Type is
// currently always "variable").
type UnusedItem struct {
	Name     string   `json:"name"`
	Type     string   `json:"type"`
	Position Position `json:"position"`
}

// CodeLocation is a position-plus-explanation pair for unreachable or
// dead code.
type CodeLocation struct {
	Description string   `json:"description"`
	Position    Position `json:"position"`
}
|
||||
|
||||
func findDeadCode(dir string) ([]DeadCodeInfo, error) {
|
||||
var deadCode []DeadCodeInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
if strings.HasSuffix(path, "_test.go") {
|
||||
return nil
|
||||
}
|
||||
|
||||
info := DeadCodeInfo{
|
||||
File: path,
|
||||
}
|
||||
|
||||
// Track variable usage
|
||||
declaredVars := make(map[string]*ast.ValueSpec)
|
||||
usedVars := make(map[string]bool)
|
||||
|
||||
// First pass: collect declared variables
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
if genDecl, ok := n.(*ast.GenDecl); ok && genDecl.Tok == token.VAR {
|
||||
for _, spec := range genDecl.Specs {
|
||||
if valueSpec, ok := spec.(*ast.ValueSpec); ok {
|
||||
for _, name := range valueSpec.Names {
|
||||
if name.Name != "_" && !ast.IsExported(name.Name) {
|
||||
declaredVars[name.Name] = valueSpec
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
// Second pass: track usage
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
if ident, ok := n.(*ast.Ident); ok {
|
||||
usedVars[ident.Name] = true
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
// Find unused variables
|
||||
for varName, valueSpec := range declaredVars {
|
||||
if !usedVars[varName] {
|
||||
for _, name := range valueSpec.Names {
|
||||
if name.Name == varName {
|
||||
pos := fset.Position(name.Pos())
|
||||
info.UnusedVars = append(info.UnusedVars, UnusedItem{
|
||||
Name: varName,
|
||||
Type: "variable",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find unreachable code (simplified detection)
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
if blockStmt, ok := n.(*ast.BlockStmt); ok {
|
||||
for i, stmt := range blockStmt.List {
|
||||
if _, ok := stmt.(*ast.ReturnStmt); ok && i < len(blockStmt.List)-1 {
|
||||
pos := fset.Position(blockStmt.List[i+1].Pos())
|
||||
info.UnreachableCode = append(info.UnreachableCode, CodeLocation{
|
||||
Description: "Code after return statement",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
if len(info.UnusedVars) > 0 || len(info.UnreachableCode) > 0 || len(info.DeadBranches) > 0 {
|
||||
deadCode = append(deadCode, info)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return deadCode, err
|
||||
}
|
||||
51
tool_find_deprecated.go
Normal file
51
tool_find_deprecated.go
Normal file
@@ -0,0 +1,51 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Deprecated usage types

// DeprecatedInfo groups deprecation findings for one file.
type DeprecatedInfo struct {
	File  string            `json:"file"`
	Usage []DeprecatedUsage `json:"usage"`
}

// DeprecatedUsage is one deprecation finding. As populated by
// findDeprecated, Item is the fixed tag "deprecated_comment" and Reason
// holds the comment text; Alternative is never set.
type DeprecatedUsage struct {
	Item        string   `json:"item"`
	Alternative string   `json:"alternative,omitempty"`
	Reason      string   `json:"reason,omitempty"`
	Position    Position `json:"position"`
}
|
||||
|
||||
func findDeprecated(dir string) ([]DeprecatedInfo, error) {
|
||||
var deprecated []DeprecatedInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
info := DeprecatedInfo{
|
||||
File: path,
|
||||
}
|
||||
|
||||
// Look for deprecated comments
|
||||
for _, cg := range file.Comments {
|
||||
for _, c := range cg.List {
|
||||
if strings.Contains(strings.ToLower(c.Text), "deprecated") {
|
||||
pos := fset.Position(c.Pos())
|
||||
info.Usage = append(info.Usage, DeprecatedUsage{
|
||||
Item: "deprecated_comment",
|
||||
Reason: c.Text,
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(info.Usage) > 0 {
|
||||
deprecated = append(deprecated, info)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return deprecated, err
|
||||
}
|
||||
69
tool_find_duplicates.go
Normal file
69
tool_find_duplicates.go
Normal file
@@ -0,0 +1,69 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// Code duplication types

// DuplicateInfo is a group of functions whose bodies matched. Content
// holds the MD5 hash of the shared body fingerprint, not the source
// text itself; Similarity is always 1.0 since only exact matches are
// detected.
type DuplicateInfo struct {
	Similarity float64             `json:"similarity"`
	Locations  []DuplicateLocation `json:"locations"`
	Content    string              `json:"content"`
}

// DuplicateLocation identifies one member of a duplicate group.
type DuplicateLocation struct {
	File     string   `json:"file"`
	Function string   `json:"function"`
	Position Position `json:"position"`
}
|
||||
|
||||
func findDuplicates(dir string, threshold float64) ([]DuplicateInfo, error) {
|
||||
var duplicates []DuplicateInfo
|
||||
functionBodies := make(map[string][]DuplicateLocation)
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
if fn, ok := n.(*ast.FuncDecl); ok && fn.Body != nil {
|
||||
body := extractFunctionBody(fn.Body, fset)
|
||||
hash := fmt.Sprintf("%x", md5.Sum([]byte(body)))
|
||||
|
||||
pos := fset.Position(fn.Pos())
|
||||
location := DuplicateLocation{
|
||||
File: path,
|
||||
Function: fn.Name.Name,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
|
||||
functionBodies[hash] = append(functionBodies[hash], location)
|
||||
}
|
||||
return true
|
||||
})
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Find duplicates
|
||||
for hash, locations := range functionBodies {
|
||||
if len(locations) > 1 {
|
||||
duplicates = append(duplicates, DuplicateInfo{
|
||||
Similarity: 1.0,
|
||||
Locations: locations,
|
||||
Content: hash,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return duplicates, nil
|
||||
}
|
||||
|
||||
// extractFunctionBody returns a cheap positional fingerprint of a
// function body: the string "<startLine>-<endLine>". It does NOT return
// the body's source text, so two different functions spanning the same
// line range (e.g. in different files) produce the same result —
// callers must not treat equal fingerprints as proof of identical code.
func extractFunctionBody(body *ast.BlockStmt, fset *token.FileSet) string {
	start := fset.Position(body.Pos())
	end := fset.Position(body.End())
	return fmt.Sprintf("%d-%d", start.Line, end.Line)
}
|
||||
102
tool_find_errors.go
Normal file
102
tool_find_errors.go
Normal file
@@ -0,0 +1,102 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Error handling types

// ErrorInfo groups error-handling findings for one file: calls whose
// (likely) error result is discarded, if-statements that look like
// error checks, and return statements that propagate an error value.
type ErrorInfo struct {
	File            string         `json:"file"`
	UnhandledErrors []ErrorContext `json:"unhandled_errors,omitempty"`
	ErrorChecks     []ErrorContext `json:"error_checks,omitempty"`
	ErrorReturns    []ErrorContext `json:"error_returns,omitempty"`
}

// ErrorContext is one finding with its surrounding source snippet. Type
// is "unchecked_call", "error_check", or "error_return".
type ErrorContext struct {
	Context  string   `json:"context"`
	Type     string   `json:"type"`
	Position Position `json:"position"`
}
|
||||
|
||||
// findErrors surveys error handling across Go files under dir. All
// detection is heuristic and name-based; no type information is used,
// so both false positives and misses are expected.
func findErrors(dir string) ([]ErrorInfo, error) {
	var errors []ErrorInfo

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		info := ErrorInfo{
			File: path,
		}

		ast.Inspect(file, func(n ast.Node) bool {
			switch x := n.(type) {
			// Find function calls that return errors but aren't checked.
			// An ExprStmt is a call used as a bare statement, i.e. all of
			// its return values (error included) are discarded.
			case *ast.ExprStmt:
				if call, ok := x.X.(*ast.CallExpr); ok {
					// Check if this function likely returns an error
					// (name-prefix heuristic; see returnsError).
					if returnsError(call, file) {
						pos := fset.Position(call.Pos())
						// extractContext (defined elsewhere) — presumably
						// returns the source text around pos; verify there.
						context := extractContext(src, pos)
						info.UnhandledErrors = append(info.UnhandledErrors, ErrorContext{
							Context:  context,
							Type:     "unchecked_call",
							Position: newPosition(pos),
						})
					}
				}

			// Find error checks, as classified by isErrorCheck (defined
			// elsewhere in this package).
			case *ast.IfStmt:
				if isErrorCheck(x) {
					pos := fset.Position(x.Pos())
					context := extractContext(src, pos)
					info.ErrorChecks = append(info.ErrorChecks, ErrorContext{
						Context:  context,
						Type:     "error_check",
						Position: newPosition(pos),
					})
				}

			// Find error returns: any returned identifier named "err" or
			// containing "error". At most one finding per return statement
			// (the break below).
			case *ast.ReturnStmt:
				for _, result := range x.Results {
					if ident, ok := result.(*ast.Ident); ok && (ident.Name == "err" || strings.Contains(ident.Name, "error")) {
						pos := fset.Position(x.Pos())
						context := extractContext(src, pos)
						info.ErrorReturns = append(info.ErrorReturns, ErrorContext{
							Context:  context,
							Type:     "error_return",
							Position: newPosition(pos),
						})
						break
					}
				}
			}
			return true
		})

		if len(info.UnhandledErrors) > 0 || len(info.ErrorChecks) > 0 || len(info.ErrorReturns) > 0 {
			errors = append(errors, info)
		}
		return nil
	})

	return errors, err
}
|
||||
|
||||
func returnsError(call *ast.CallExpr, file *ast.File) bool {
|
||||
// Simple heuristic: check if the function name suggests it returns an error
|
||||
switch fun := call.Fun.(type) {
|
||||
case *ast.Ident:
|
||||
name := fun.Name
|
||||
return strings.HasPrefix(name, "New") || strings.HasPrefix(name, "Create") ||
|
||||
strings.HasPrefix(name, "Open") || strings.HasPrefix(name, "Read") ||
|
||||
strings.HasPrefix(name, "Write") || strings.HasPrefix(name, "Parse")
|
||||
case *ast.SelectorExpr:
|
||||
name := fun.Sel.Name
|
||||
return strings.HasPrefix(name, "New") || strings.HasPrefix(name, "Create") ||
|
||||
strings.HasPrefix(name, "Open") || strings.HasPrefix(name, "Read") ||
|
||||
strings.HasPrefix(name, "Write") || strings.HasPrefix(name, "Parse")
|
||||
}
|
||||
return false
|
||||
}
|
||||
57
tool_find_function_calls.go
Normal file
57
tool_find_function_calls.go
Normal file
@@ -0,0 +1,57 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// Function call types

// FunctionCall records one call site of a searched-for function: the
// name of the function whose declaration the call appears in, the
// surrounding source snippet, and the call's position.
type FunctionCall struct {
	Caller   string   `json:"caller"`
	Context  string   `json:"context"`
	Position Position `json:"position"`
}
|
||||
|
||||
func findFunctionCalls(dir string, functionName string) ([]FunctionCall, error) {
|
||||
var calls []FunctionCall
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
currentFunc := ""
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
// Track current function context
|
||||
if fn, ok := n.(*ast.FuncDecl); ok {
|
||||
currentFunc = fn.Name.Name
|
||||
return true
|
||||
}
|
||||
|
||||
// Find function calls
|
||||
switch x := n.(type) {
|
||||
case *ast.CallExpr:
|
||||
var calledName string
|
||||
switch fun := x.Fun.(type) {
|
||||
case *ast.Ident:
|
||||
calledName = fun.Name
|
||||
case *ast.SelectorExpr:
|
||||
calledName = fun.Sel.Name
|
||||
}
|
||||
|
||||
if calledName == functionName {
|
||||
pos := fset.Position(x.Pos())
|
||||
context := extractContext(src, pos)
|
||||
|
||||
calls = append(calls, FunctionCall{
|
||||
Caller: currentFunc,
|
||||
Context: context,
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return calls, err
|
||||
}
|
||||
99
tool_find_generics.go
Normal file
99
tool_find_generics.go
Normal file
@@ -0,0 +1,99 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// Generic types

// GenericInfo describes one generic (type-parameterized) declaration.
// Kind is "type" or "function". Instances would list instantiation
// sites but is not populated by findGenerics.
type GenericInfo struct {
	Name       string      `json:"name"`
	Kind       string      `json:"kind"`
	Package    string      `json:"package"`
	Position   Position    `json:"position"`
	TypeParams []TypeParam `json:"type_params"`
	Instances  []Instance  `json:"instances,omitempty"`
}

// TypeParam is one type parameter with its constraint expression as
// written in source.
type TypeParam struct {
	Name       string   `json:"name"`
	Constraint string   `json:"constraint"`
	Position   Position `json:"position"`
}

// Instance records one instantiation of a generic declaration with the
// concrete type arguments used.
type Instance struct {
	Types    []string `json:"types"`
	Position Position `json:"position"`
}
|
||||
|
||||
func findGenerics(dir string) ([]GenericInfo, error) {
|
||||
var generics []GenericInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch x := n.(type) {
|
||||
case *ast.GenDecl:
|
||||
for _, spec := range x.Specs {
|
||||
if ts, ok := spec.(*ast.TypeSpec); ok && ts.TypeParams != nil {
|
||||
pos := fset.Position(ts.Pos())
|
||||
info := GenericInfo{
|
||||
Name: ts.Name.Name,
|
||||
Kind: "type",
|
||||
Package: file.Name.Name,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
|
||||
// Extract type parameters
|
||||
for _, param := range ts.TypeParams.List {
|
||||
for _, name := range param.Names {
|
||||
namePos := fset.Position(name.Pos())
|
||||
tp := TypeParam{
|
||||
Name: name.Name,
|
||||
Position: newPosition(namePos),
|
||||
}
|
||||
if param.Type != nil {
|
||||
tp.Constraint = exprToString(param.Type)
|
||||
}
|
||||
info.TypeParams = append(info.TypeParams, tp)
|
||||
}
|
||||
}
|
||||
|
||||
generics = append(generics, info)
|
||||
}
|
||||
}
|
||||
|
||||
case *ast.FuncDecl:
|
||||
if x.Type.TypeParams != nil {
|
||||
pos := fset.Position(x.Pos())
|
||||
info := GenericInfo{
|
||||
Name: x.Name.Name,
|
||||
Kind: "function",
|
||||
Package: file.Name.Name,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
|
||||
// Extract type parameters
|
||||
for _, param := range x.Type.TypeParams.List {
|
||||
for _, name := range param.Names {
|
||||
namePos := fset.Position(name.Pos())
|
||||
tp := TypeParam{
|
||||
Name: name.Name,
|
||||
Position: newPosition(namePos),
|
||||
}
|
||||
if param.Type != nil {
|
||||
tp.Constraint = exprToString(param.Type)
|
||||
}
|
||||
info.TypeParams = append(info.TypeParams, tp)
|
||||
}
|
||||
}
|
||||
|
||||
generics = append(generics, info)
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
return nil
|
||||
})
|
||||
|
||||
return generics, err
|
||||
}
|
||||
98
tool_find_imports.go
Normal file
98
tool_find_imports.go
Normal file
@@ -0,0 +1,98 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Import analysis types

// ImportInfo lists one file's imports plus any that appear unused
// (per the symbol-usage heuristic in findImports).
type ImportInfo struct {
	Package       string         `json:"package"`
	File          string         `json:"file"`
	Imports       []ImportDetail `json:"imports"`
	UnusedImports []string       `json:"unused_imports,omitempty"`
}

// ImportDetail is a single import: its path, optional alias, the
// symbols referenced through it, and the import statement's position.
type ImportDetail struct {
	Path     string   `json:"path"`
	Alias    string   `json:"alias,omitempty"`
	Used     []string `json:"used_symbols,omitempty"`
	Position Position `json:"position"`
}
|
||||
|
||||
// findImports inventories imports per Go file and flags imports with no
// observed symbol usage. Usage detection is heuristic: selector
// expressions (pkg.Sym) are matched against each import's alias or the
// last element of its path — it does not resolve actual package names,
// so paths whose package name differs from the directory (e.g.
// "gopkg.in/yaml.v2") will look unused, and dot imports are not handled.
func findImports(dir string) ([]ImportInfo, error) {
	var imports []ImportInfo

	err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
		info := ImportInfo{
			Package: file.Name.Name,
			File:    path,
			Imports: []ImportDetail{},
		}

		// Collect all imports, keyed by path. NOTE(review): duplicate
		// imports of the same path (rare, different aliases) would
		// overwrite each other here.
		importMap := make(map[string]*ImportDetail)
		for _, imp := range file.Imports {
			importPath := strings.Trim(imp.Path.Value, `"`)
			pos := fset.Position(imp.Pos())
			detail := &ImportDetail{
				Path:     importPath,
				Position: newPosition(pos),
			}
			if imp.Name != nil {
				detail.Alias = imp.Name.Name
			}
			importMap[importPath] = detail
			info.Imports = append(info.Imports, *detail)
		}

		// Track which imports are used: import path -> set of symbols
		// referenced through it.
		usedImports := make(map[string]map[string]bool)
		ast.Inspect(file, func(n ast.Node) bool {
			switch x := n.(type) {
			case *ast.SelectorExpr:
				if ident, ok := x.X.(*ast.Ident); ok {
					pkgName := ident.Name
					symbol := x.Sel.Name

					// Find the matching import by alias or by the path's
					// base name. (Heuristic — see function comment.)
					for importPath, detail := range importMap {
						importName := filepath.Base(importPath)
						if detail.Alias != "" && detail.Alias == pkgName {
							if usedImports[importPath] == nil {
								usedImports[importPath] = make(map[string]bool)
							}
							usedImports[importPath][symbol] = true
						} else if importName == pkgName {
							// NOTE(review): this branch also fires for an
							// aliased import whose base name happens to match
							// another identifier — confirm intended.
							if usedImports[importPath] == nil {
								usedImports[importPath] = make(map[string]bool)
							}
							usedImports[importPath][symbol] = true
						}
					}
				}
			}
			return true
		})

		// Update import details with used symbols; everything else is
		// reported unused, except blank ("_") imports. NOTE(review): the
		// "_test" path-suffix guard looks ineffective — import paths do
		// not normally end in "_test"; confirm intent.
		for i, imp := range info.Imports {
			if used, ok := usedImports[imp.Path]; ok {
				for symbol := range used {
					info.Imports[i].Used = append(info.Imports[i].Used, symbol)
				}
			} else if !strings.HasSuffix(imp.Path, "_test") && imp.Alias != "_" {
				info.UnusedImports = append(info.UnusedImports, imp.Path)
			}
		}

		if len(info.Imports) > 0 {
			imports = append(imports, info)
		}
		return nil
	})

	return imports, err
}
|
||||
93
tool_find_inefficiencies.go
Normal file
93
tool_find_inefficiencies.go
Normal file
@@ -0,0 +1,93 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// Performance inefficiency types
|
||||
// InefficiencyInfo lists the performance smells found in one file.
type InefficiencyInfo struct {
	File         string             `json:"file"`
	StringConcat []InefficiencyItem `json:"string_concat,omitempty"`           // "+" on string operands inside for-loops
	Conversions  []InefficiencyItem `json:"unnecessary_conversions,omitempty"` // T(x) where x already reads as type T
	Allocations  []InefficiencyItem `json:"potential_allocations,omitempty"`   // reserved; findInefficiencies never populates it yet
}

// InefficiencyItem is a single finding with a suggested remedy.
type InefficiencyItem struct {
	Type        string   `json:"type"`        // machine-readable finding id
	Description string   `json:"description"` // human-readable explanation
	Suggestion  string   `json:"suggestion"`  // recommended fix
	Position    Position `json:"position"`    // where the smell occurs
}
|
||||
|
||||
func findInefficiencies(dir string) ([]InefficiencyInfo, error) {
|
||||
var inefficiencies []InefficiencyInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
info := InefficiencyInfo{
|
||||
File: path,
|
||||
}
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
// Find string concatenation in loops
|
||||
if forStmt, ok := n.(*ast.ForStmt); ok {
|
||||
ast.Inspect(forStmt.Body, func(inner ast.Node) bool {
|
||||
if binExpr, ok := inner.(*ast.BinaryExpr); ok && binExpr.Op == token.ADD {
|
||||
if isStringType(binExpr.X) || isStringType(binExpr.Y) {
|
||||
pos := fset.Position(binExpr.Pos())
|
||||
info.StringConcat = append(info.StringConcat, InefficiencyItem{
|
||||
Type: "string_concatenation_in_loop",
|
||||
Description: "String concatenation in loop can be inefficient",
|
||||
Suggestion: "Consider using strings.Builder",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
// Find unnecessary type conversions
|
||||
if callExpr, ok := n.(*ast.CallExpr); ok {
|
||||
if len(callExpr.Args) == 1 {
|
||||
if ident, ok := callExpr.Fun.(*ast.Ident); ok {
|
||||
argType := getExprType(callExpr.Args[0])
|
||||
if ident.Name == argType {
|
||||
pos := fset.Position(callExpr.Pos())
|
||||
info.Conversions = append(info.Conversions, InefficiencyItem{
|
||||
Type: "unnecessary_conversion",
|
||||
Description: fmt.Sprintf("Unnecessary conversion to %s", ident.Name),
|
||||
Suggestion: "Remove unnecessary type conversion",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
if len(info.StringConcat) > 0 || len(info.Conversions) > 0 || len(info.Allocations) > 0 {
|
||||
inefficiencies = append(inefficiencies, info)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return inefficiencies, err
|
||||
}
|
||||
|
||||
func isStringType(expr ast.Expr) bool {
|
||||
if ident, ok := expr.(*ast.Ident); ok {
|
||||
return ident.Name == "string"
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func getExprType(expr ast.Expr) string {
|
||||
if ident, ok := expr.(*ast.Ident); ok {
|
||||
return ident.Name
|
||||
}
|
||||
return "unknown"
|
||||
}
|
||||
92
tool_find_missing_tests.go
Normal file
92
tool_find_missing_tests.go
Normal file
@@ -0,0 +1,92 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Missing tests types
|
||||
// MissingTestInfo flags one exported function that has no matching
// TestXxx function in a test file.
type MissingTestInfo struct {
	Function    string   `json:"function"`
	Package     string   `json:"package"`
	Complexity  int      `json:"complexity"`  // heuristic 1-10 score from calculateComplexity
	Criticality string   `json:"criticality"` // "high", "medium", or "low" (determineCriticality)
	Reason      string   `json:"reason"`
	Position    Position `json:"position"` // declaration site of the untested function
}
|
||||
|
||||
func findMissingTests(dir string) ([]MissingTestInfo, error) {
|
||||
var missingTests []MissingTestInfo
|
||||
|
||||
// Get all exported functions
|
||||
exportedFuncs := make(map[string]*ExportedFunc)
|
||||
testedFuncs := make(map[string]bool)
|
||||
|
||||
// Collect exported functions
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
if strings.HasSuffix(path, "_test.go") {
|
||||
// Track tested functions
|
||||
for _, decl := range file.Decls {
|
||||
if fn, ok := decl.(*ast.FuncDecl); ok && strings.HasPrefix(fn.Name.Name, "Test") {
|
||||
testedFunc := strings.TrimPrefix(fn.Name.Name, "Test")
|
||||
testedFuncs[file.Name.Name+"."+testedFunc] = true
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Collect exported functions
|
||||
for _, decl := range file.Decls {
|
||||
if fn, ok := decl.(*ast.FuncDecl); ok && ast.IsExported(fn.Name.Name) {
|
||||
pos := fset.Position(fn.Pos())
|
||||
key := file.Name.Name + "." + fn.Name.Name
|
||||
exportedFuncs[key] = &ExportedFunc{
|
||||
Name: fn.Name.Name,
|
||||
Package: file.Name.Name,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Find missing tests
|
||||
for key, fn := range exportedFuncs {
|
||||
if !testedFuncs[key] {
|
||||
complexity := calculateComplexity(fn.Name)
|
||||
criticality := determineCriticality(fn.Name)
|
||||
|
||||
missingTests = append(missingTests, MissingTestInfo{
|
||||
Function: fn.Name,
|
||||
Package: fn.Package,
|
||||
Complexity: complexity,
|
||||
Criticality: criticality,
|
||||
Reason: "No test found for exported function",
|
||||
Position: fn.Position,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return missingTests, nil
|
||||
}
|
||||
|
||||
// calculateComplexity returns a rough 1-10 complexity score for a
// function. It is a placeholder heuristic derived from the name length,
// not a real cyclomatic measure.
func calculateComplexity(funcName string) int {
	return 1 + len(funcName)%10
}
|
||||
|
||||
// determineCriticality buckets a function into "high", "medium", or
// "low" risk by keyword: destructive verbs rank highest, mutating
// verbs medium, everything else low. Matching is case-insensitive.
func determineCriticality(funcName string) string {
	lower := strings.ToLower(funcName)
	switch {
	case strings.Contains(lower, "delete"), strings.Contains(lower, "remove"):
		return "high"
	case strings.Contains(lower, "create"), strings.Contains(lower, "update"):
		return "medium"
	default:
		return "low"
	}
}
|
||||
73
tool_find_patterns.go
Normal file
73
tool_find_patterns.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Design pattern types
|
||||
// PatternInfo groups every detected occurrence of one design pattern.
type PatternInfo struct {
	Pattern     string              `json:"pattern"` // pattern name, e.g. "singleton" or "factory"
	Occurrences []PatternOccurrence `json:"occurrences"`
}

// PatternOccurrence is one place a pattern was heuristically spotted.
type PatternOccurrence struct {
	File        string   `json:"file"`
	Description string   `json:"description"`
	Quality     string   `json:"quality"` // "good" (confident) or "review" (needs a human look)
	Position    Position `json:"position"`
}
|
||||
|
||||
func findPatterns(dir string) ([]PatternInfo, error) {
|
||||
var patterns []PatternInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
// Look for singleton pattern
|
||||
singletonPattern := PatternInfo{Pattern: "singleton"}
|
||||
|
||||
// Look for factory pattern
|
||||
factoryPattern := PatternInfo{Pattern: "factory"}
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
if fn, ok := n.(*ast.FuncDecl); ok {
|
||||
name := strings.ToLower(fn.Name.Name)
|
||||
|
||||
// Detect factory pattern
|
||||
if strings.HasPrefix(name, "new") || strings.HasPrefix(name, "create") {
|
||||
pos := fset.Position(fn.Pos())
|
||||
factoryPattern.Occurrences = append(factoryPattern.Occurrences, PatternOccurrence{
|
||||
File: path,
|
||||
Description: fmt.Sprintf("Factory function: %s", fn.Name.Name),
|
||||
Quality: "good",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
|
||||
// Detect singleton pattern (simplified)
|
||||
if strings.Contains(name, "instance") && fn.Type.Results != nil {
|
||||
pos := fset.Position(fn.Pos())
|
||||
singletonPattern.Occurrences = append(singletonPattern.Occurrences, PatternOccurrence{
|
||||
File: path,
|
||||
Description: fmt.Sprintf("Potential singleton: %s", fn.Name.Name),
|
||||
Quality: "review",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
if len(singletonPattern.Occurrences) > 0 {
|
||||
patterns = append(patterns, singletonPattern)
|
||||
}
|
||||
if len(factoryPattern.Occurrences) > 0 {
|
||||
patterns = append(patterns, factoryPattern)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return patterns, err
|
||||
}
|
||||
57
tool_find_references.go
Normal file
57
tool_find_references.go
Normal file
@@ -0,0 +1,57 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// Reference records one occurrence of a searched-for symbol.
type Reference struct {
	Context  string   `json:"context"`  // surrounding source text (from extractContext)
	Kind     string   `json:"kind"`     // "identifier" or "selector"
	Position Position `json:"position"` // file/line/column of the occurrence
}
|
||||
|
||||
func findReferences(dir string, symbol string) ([]Reference, error) {
|
||||
var refs []Reference
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch node := n.(type) {
|
||||
case *ast.Ident:
|
||||
if node.Name == symbol {
|
||||
pos := fset.Position(node.Pos())
|
||||
kind := identifyReferenceKind(node)
|
||||
context := extractContext(src, pos)
|
||||
|
||||
refs = append(refs, Reference{
|
||||
Context: context,
|
||||
Kind: kind,
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
|
||||
case *ast.SelectorExpr:
|
||||
if node.Sel.Name == symbol {
|
||||
pos := fset.Position(node.Sel.Pos())
|
||||
context := extractContext(src, pos)
|
||||
|
||||
refs = append(refs, Reference{
|
||||
Context: context,
|
||||
Kind: "selector",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return refs, err
|
||||
}
|
||||
|
||||
// identifyReferenceKind classifies how an identifier reference is used.
// Currently a stub: every bare identifier is reported as "identifier";
// selector references are labeled "selector" by the caller instead.
func identifyReferenceKind(ident *ast.Ident) string {
	return "identifier"
}
|
||||
110
tool_find_struct_usage.go
Normal file
110
tool_find_struct_usage.go
Normal file
@@ -0,0 +1,110 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Struct usage types
|
||||
// StructUsage collects every way one file uses a particular struct type.
type StructUsage struct {
	File        string          `json:"file"`
	Literals    []StructLiteral `json:"literals,omitempty"`     // composite literals of the struct
	FieldAccess []FieldAccess   `json:"field_access,omitempty"` // selector accesses through the struct
	TypeUsage   []TypeUsage     `json:"type_usage,omitempty"`   // the struct name used as a declared type
}

// StructLiteral describes one composite literal of the struct.
type StructLiteral struct {
	Fields      []string `json:"fields_initialized"` // keys given in key:value form
	IsComposite bool     `json:"is_composite"`       // true when the literal has at least one element
	Position    Position `json:"position"`
}

// FieldAccess is one field (or method) selected off a struct value.
type FieldAccess struct {
	Field    string   `json:"field"`
	Context  string   `json:"context"` // surrounding source text (from extractContext)
	Position Position `json:"position"`
}

// TypeUsage is one appearance of the struct name as a type.
type TypeUsage struct {
	Usage    string   `json:"usage"` // currently always "field" (see findStructUsage)
	Position Position `json:"position"`
}
|
||||
|
||||
func findStructUsage(dir string, structName string) ([]StructUsage, error) {
|
||||
var usages []StructUsage
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
usage := StructUsage{
|
||||
File: path,
|
||||
}
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch x := n.(type) {
|
||||
// Find struct literals
|
||||
case *ast.CompositeLit:
|
||||
if typeName := getTypeName(x.Type); typeName == structName {
|
||||
pos := fset.Position(x.Pos())
|
||||
lit := StructLiteral{
|
||||
IsComposite: len(x.Elts) > 0,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
|
||||
// Extract initialized fields
|
||||
for _, elt := range x.Elts {
|
||||
if kv, ok := elt.(*ast.KeyValueExpr); ok {
|
||||
if ident, ok := kv.Key.(*ast.Ident); ok {
|
||||
lit.Fields = append(lit.Fields, ident.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
usage.Literals = append(usage.Literals, lit)
|
||||
}
|
||||
|
||||
// Find field access
|
||||
case *ast.SelectorExpr:
|
||||
if typeName := getTypeName(x.X); strings.Contains(typeName, structName) {
|
||||
pos := fset.Position(x.Sel.Pos())
|
||||
context := extractContext(src, pos)
|
||||
|
||||
usage.FieldAccess = append(usage.FieldAccess, FieldAccess{
|
||||
Field: x.Sel.Name,
|
||||
Context: context,
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
|
||||
// Find type usage in declarations
|
||||
case *ast.Field:
|
||||
if typeName := getTypeName(x.Type); typeName == structName {
|
||||
pos := fset.Position(x.Pos())
|
||||
usage.TypeUsage = append(usage.TypeUsage, TypeUsage{
|
||||
Usage: "field",
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
if len(usage.Literals) > 0 || len(usage.FieldAccess) > 0 || len(usage.TypeUsage) > 0 {
|
||||
usages = append(usages, usage)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
return usages, err
|
||||
}
|
||||
|
||||
func getTypeName(expr ast.Expr) string {
|
||||
switch x := expr.(type) {
|
||||
case *ast.Ident:
|
||||
return x.Name
|
||||
case *ast.StarExpr:
|
||||
return getTypeName(x.X)
|
||||
case *ast.SelectorExpr:
|
||||
return exprToString(x)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
101
tool_find_symbols.go
Normal file
101
tool_find_symbols.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Symbol is one named declaration matched by findSymbols.
type Symbol struct {
	Name     string   `json:"name"`     // declared identifier
	Type     string   `json:"type"`     // "function", "struct", "interface", "type", "variable", or "constant"
	Package  string   `json:"package"`  // package the declaration lives in
	Exported bool     `json:"exported"` // ast.IsExported(Name)
	Position Position `json:"position"` // declaration site
}
|
||||
|
||||
func findSymbols(dir string, pattern string) ([]Symbol, error) {
|
||||
var symbols []Symbol
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
if strings.HasSuffix(path, "_test.go") && !strings.Contains(pattern, "Test") {
|
||||
return nil
|
||||
}
|
||||
|
||||
pkgName := file.Name.Name
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch decl := n.(type) {
|
||||
case *ast.FuncDecl:
|
||||
name := decl.Name.Name
|
||||
if matchesPattern(name, pattern) {
|
||||
pos := fset.Position(decl.Pos())
|
||||
symbols = append(symbols, Symbol{
|
||||
Name: name,
|
||||
Type: "function",
|
||||
Package: pkgName,
|
||||
Exported: ast.IsExported(name),
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
|
||||
case *ast.GenDecl:
|
||||
for _, spec := range decl.Specs {
|
||||
switch s := spec.(type) {
|
||||
case *ast.TypeSpec:
|
||||
name := s.Name.Name
|
||||
if matchesPattern(name, pattern) {
|
||||
pos := fset.Position(s.Pos())
|
||||
kind := "type"
|
||||
switch s.Type.(type) {
|
||||
case *ast.InterfaceType:
|
||||
kind = "interface"
|
||||
case *ast.StructType:
|
||||
kind = "struct"
|
||||
}
|
||||
symbols = append(symbols, Symbol{
|
||||
Name: name,
|
||||
Type: kind,
|
||||
Package: pkgName,
|
||||
Exported: ast.IsExported(name),
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
|
||||
case *ast.ValueSpec:
|
||||
for _, name := range s.Names {
|
||||
if matchesPattern(name.Name, pattern) {
|
||||
pos := fset.Position(name.Pos())
|
||||
kind := "variable"
|
||||
if decl.Tok == token.CONST {
|
||||
kind = "constant"
|
||||
}
|
||||
symbols = append(symbols, Symbol{
|
||||
Name: name.Name,
|
||||
Type: kind,
|
||||
Package: pkgName,
|
||||
Exported: ast.IsExported(name.Name),
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return symbols, err
|
||||
}
|
||||
|
||||
// matchesPattern reports whether name matches pattern using a
// case-insensitive substring test; an empty pattern matches anything.
func matchesPattern(name, pattern string) bool {
	if pattern == "" {
		return true
	}
	return strings.Contains(strings.ToLower(name), strings.ToLower(pattern))
}
|
||||
123
tool_generate_docs.go
Normal file
123
tool_generate_docs.go
Normal file
@@ -0,0 +1,123 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Documentation types
|
||||
// DocInfo is the structured (JSON) documentation for one package.
type DocInfo struct {
	Package   string        `json:"package"`
	Overview  string        `json:"overview"` // not populated by generateJsonDocs yet
	Functions []DocFunction `json:"functions"`
	Types     []DocType     `json:"types"`
}

// DocFunction documents one function of a package.
type DocFunction struct {
	Name        string   `json:"name"`
	Signature   string   `json:"signature"`
	Description string   `json:"description"`          // the function's doc comment
	Parameters  []string `json:"parameters,omitempty"` // not populated by generateJsonDocs yet
	Returns     []string `json:"returns,omitempty"`    // not populated by generateJsonDocs yet
	Examples    []string `json:"examples,omitempty"`   // not populated by generateJsonDocs yet
	Position    Position `json:"position"`
}

// DocType documents one named type of a package.
type DocType struct {
	Name        string      `json:"name"`
	Kind        string      `json:"kind"`
	Description string      `json:"description"`       // the type's doc comment
	Fields      []DocField  `json:"fields,omitempty"`  // not populated by generateJsonDocs yet
	Methods     []DocMethod `json:"methods,omitempty"` // not populated by generateJsonDocs yet
	Position    Position    `json:"position"`
}

// DocField documents one struct field.
type DocField struct {
	Name        string `json:"name"`
	Type        string `json:"type"`
	Description string `json:"description"`
}

// DocMethod documents one method.
type DocMethod struct {
	Name        string `json:"name"`
	Signature   string `json:"signature"`
	Description string `json:"description"`
}
|
||||
|
||||
func generateDocs(dir string, format string) (interface{}, error) {
|
||||
if format == "markdown" {
|
||||
return generateMarkdownDocs(dir)
|
||||
}
|
||||
return generateJsonDocs(dir)
|
||||
}
|
||||
|
||||
func generateMarkdownDocs(dir string) (string, error) {
|
||||
apis, err := extractApi(dir)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
var markdown strings.Builder
|
||||
for _, api := range apis {
|
||||
markdown.WriteString(fmt.Sprintf("# Package %s\n\n", api.Package))
|
||||
|
||||
if len(api.Functions) > 0 {
|
||||
markdown.WriteString("## Functions\n\n")
|
||||
for _, fn := range api.Functions {
|
||||
markdown.WriteString(fmt.Sprintf("### %s\n\n", fn.Name))
|
||||
markdown.WriteString(fmt.Sprintf("```go\n%s\n```\n\n", fn.Signature))
|
||||
if fn.Doc != "" {
|
||||
markdown.WriteString(fmt.Sprintf("%s\n\n", fn.Doc))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(api.Types) > 0 {
|
||||
markdown.WriteString("## Types\n\n")
|
||||
for _, typ := range api.Types {
|
||||
markdown.WriteString(fmt.Sprintf("### %s\n\n", typ.Name))
|
||||
if typ.Doc != "" {
|
||||
markdown.WriteString(fmt.Sprintf("%s\n\n", typ.Doc))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return markdown.String(), nil
|
||||
}
|
||||
|
||||
func generateJsonDocs(dir string) ([]DocInfo, error) {
|
||||
apis, err := extractApi(dir)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var docs []DocInfo
|
||||
for _, api := range apis {
|
||||
doc := DocInfo{
|
||||
Package: api.Package,
|
||||
}
|
||||
|
||||
for _, fn := range api.Functions {
|
||||
doc.Functions = append(doc.Functions, DocFunction{
|
||||
Name: fn.Name,
|
||||
Signature: fn.Signature,
|
||||
Description: fn.Doc,
|
||||
Position: fn.Position,
|
||||
})
|
||||
}
|
||||
|
||||
for _, typ := range api.Types {
|
||||
doc.Types = append(doc.Types, DocType{
|
||||
Name: typ.Name,
|
||||
Kind: typ.Kind,
|
||||
Description: typ.Doc,
|
||||
Position: typ.Position,
|
||||
})
|
||||
}
|
||||
|
||||
docs = append(docs, doc)
|
||||
}
|
||||
|
||||
return docs, nil
|
||||
}
|
||||
197
tool_get_type_info.go
Normal file
197
tool_get_type_info.go
Normal file
@@ -0,0 +1,197 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// TypeInfo is the full description of one named type, as produced by
// getTypeInfo.
type TypeInfo struct {
	Name       string       `json:"name"`
	Package    string       `json:"package"`
	Kind       string       `json:"kind"` // "struct", "interface", "alias", or "other"
	Position   Position     `json:"position"`
	Fields     []FieldInfo  `json:"fields,omitempty"`     // struct kinds only
	Methods    []MethodInfo `json:"methods,omitempty"`    // methods declared in the same file as the type
	Embedded   []string     `json:"embedded,omitempty"`   // embedded field type names (structs)
	Interface  []MethodInfo `json:"interface,omitempty"`  // declared method set (interfaces)
	Underlying string       `json:"underlying,omitempty"` // referenced type for "alias" kinds
}

// FieldInfo describes one struct field.
type FieldInfo struct {
	Name     string   `json:"name"` // empty for embedded fields
	Type     string   `json:"type"`
	Tag      string   `json:"tag,omitempty"` // raw struct tag literal, including backquotes
	Exported bool     `json:"exported"`
	Position Position `json:"position"`
}

// MethodInfo describes one method (or interface method) signature.
type MethodInfo struct {
	Name      string   `json:"name"`
	Signature string   `json:"signature"`
	Receiver  string   `json:"receiver,omitempty"` // empty for interface methods
	Exported  bool     `json:"exported"`
	Position  Position `json:"position"`
}
|
||||
|
||||
func getTypeInfo(dir string, typeName string) (*TypeInfo, error) {
|
||||
var result *TypeInfo
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
if result != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
if result != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
switch decl := n.(type) {
|
||||
case *ast.GenDecl:
|
||||
for _, spec := range decl.Specs {
|
||||
if ts, ok := spec.(*ast.TypeSpec); ok && ts.Name.Name == typeName {
|
||||
pos := fset.Position(ts.Pos())
|
||||
info := &TypeInfo{
|
||||
Name: typeName,
|
||||
Package: file.Name.Name,
|
||||
Position: newPosition(pos),
|
||||
}
|
||||
|
||||
switch t := ts.Type.(type) {
|
||||
case *ast.StructType:
|
||||
info.Kind = "struct"
|
||||
info.Fields = extractFields(t, fset)
|
||||
info.Embedded = extractEmbedded(t)
|
||||
|
||||
case *ast.InterfaceType:
|
||||
info.Kind = "interface"
|
||||
info.Interface = extractInterfaceMethods(t, fset)
|
||||
|
||||
case *ast.Ident:
|
||||
info.Kind = "alias"
|
||||
info.Underlying = t.Name
|
||||
|
||||
case *ast.SelectorExpr:
|
||||
info.Kind = "alias"
|
||||
if x, ok := t.X.(*ast.Ident); ok {
|
||||
info.Underlying = x.Name + "." + t.Sel.Name
|
||||
}
|
||||
|
||||
default:
|
||||
info.Kind = "other"
|
||||
}
|
||||
|
||||
info.Methods = extractMethods(file, typeName, fset)
|
||||
result = info
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if result == nil && err == nil {
|
||||
return nil, fmt.Errorf("type %s not found", typeName)
|
||||
}
|
||||
|
||||
return result, err
|
||||
}
|
||||
|
||||
func extractFields(st *ast.StructType, fset *token.FileSet) []FieldInfo {
|
||||
var fields []FieldInfo
|
||||
|
||||
for _, field := range st.Fields.List {
|
||||
fieldType := exprToString(field.Type)
|
||||
tag := ""
|
||||
if field.Tag != nil {
|
||||
tag = field.Tag.Value
|
||||
}
|
||||
|
||||
if len(field.Names) == 0 {
|
||||
pos := fset.Position(field.Pos())
|
||||
fields = append(fields, FieldInfo{
|
||||
Name: "",
|
||||
Type: fieldType,
|
||||
Tag: tag,
|
||||
Exported: true,
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
} else {
|
||||
for _, name := range field.Names {
|
||||
pos := fset.Position(name.Pos())
|
||||
fields = append(fields, FieldInfo{
|
||||
Name: name.Name,
|
||||
Type: fieldType,
|
||||
Tag: tag,
|
||||
Exported: ast.IsExported(name.Name),
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fields
|
||||
}
|
||||
|
||||
func extractEmbedded(st *ast.StructType) []string {
|
||||
var embedded []string
|
||||
|
||||
for _, field := range st.Fields.List {
|
||||
if len(field.Names) == 0 {
|
||||
embedded = append(embedded, exprToString(field.Type))
|
||||
}
|
||||
}
|
||||
|
||||
return embedded
|
||||
}
|
||||
|
||||
func extractInterfaceMethods(it *ast.InterfaceType, fset *token.FileSet) []MethodInfo {
|
||||
var methods []MethodInfo
|
||||
|
||||
for _, method := range it.Methods.List {
|
||||
if len(method.Names) > 0 {
|
||||
for _, name := range method.Names {
|
||||
sig := exprToString(method.Type)
|
||||
pos := fset.Position(name.Pos())
|
||||
methods = append(methods, MethodInfo{
|
||||
Name: name.Name,
|
||||
Signature: sig,
|
||||
Exported: ast.IsExported(name.Name),
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return methods
|
||||
}
|
||||
|
||||
func extractMethods(file *ast.File, typeName string, fset *token.FileSet) []MethodInfo {
|
||||
var methods []MethodInfo
|
||||
|
||||
for _, decl := range file.Decls {
|
||||
if fn, ok := decl.(*ast.FuncDecl); ok && fn.Recv != nil {
|
||||
for _, recv := range fn.Recv.List {
|
||||
recvType := exprToString(recv.Type)
|
||||
if strings.Contains(recvType, typeName) {
|
||||
sig := funcSignature(fn.Type)
|
||||
pos := fset.Position(fn.Name.Pos())
|
||||
methods = append(methods, MethodInfo{
|
||||
Name: fn.Name.Name,
|
||||
Signature: sig,
|
||||
Receiver: recvType,
|
||||
Exported: ast.IsExported(fn.Name.Name),
|
||||
Position: newPosition(pos),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return methods
|
||||
}
|
||||
81
tool_list_packages.go
Normal file
81
tool_list_packages.go
Normal file
@@ -0,0 +1,81 @@
|
||||
package main
|
||||
|
||||
import (
	"go/ast"
	"go/token"
	"path/filepath"
	"sort"
	"strings"
)
|
||||
|
||||
// Package aggregates the Go files and imports found in one directory.
type Package struct {
	ImportPath string   `json:"import_path"` // directory path relative to the scanned root ("." for the root itself)
	Name       string   `json:"name"`        // package name taken from the first file seen in the directory
	Dir        string   `json:"dir"`         // directory containing the files
	GoFiles    []string `json:"go_files"`    // base names of the .go files
	Imports    []string `json:"imports"`     // de-duplicated union of import paths across the files
}
|
||||
|
||||
func listPackages(dir string, includeTests bool) ([]Package, error) {
|
||||
packages := make(map[string]*Package)
|
||||
|
||||
err := walkGoFiles(dir, func(path string, src []byte, file *ast.File, fset *token.FileSet) error {
|
||||
// Skip test files if not requested
|
||||
if !includeTests && strings.HasSuffix(path, "_test.go") {
|
||||
return nil
|
||||
}
|
||||
|
||||
pkgDir := filepath.Dir(path)
|
||||
|
||||
// Initialize package if not seen before
|
||||
if _, exists := packages[pkgDir]; !exists {
|
||||
importPath := strings.TrimPrefix(pkgDir, dir)
|
||||
importPath = strings.TrimPrefix(importPath, "/")
|
||||
if importPath == "" {
|
||||
importPath = "."
|
||||
}
|
||||
|
||||
packages[pkgDir] = &Package{
|
||||
ImportPath: importPath,
|
||||
Name: file.Name.Name,
|
||||
Dir: pkgDir,
|
||||
GoFiles: []string{},
|
||||
Imports: []string{},
|
||||
}
|
||||
}
|
||||
|
||||
// Add file to package
|
||||
fileName := filepath.Base(path)
|
||||
packages[pkgDir].GoFiles = append(packages[pkgDir].GoFiles, fileName)
|
||||
|
||||
// Collect unique imports
|
||||
imports := make(map[string]bool)
|
||||
for _, imp := range file.Imports {
|
||||
importPath := strings.Trim(imp.Path.Value, `"`)
|
||||
imports[importPath] = true
|
||||
}
|
||||
|
||||
// Merge imports into package
|
||||
existingImports := make(map[string]bool)
|
||||
for _, imp := range packages[pkgDir].Imports {
|
||||
existingImports[imp] = true
|
||||
}
|
||||
for imp := range imports {
|
||||
if !existingImports[imp] {
|
||||
packages[pkgDir].Imports = append(packages[pkgDir].Imports, imp)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var result []Package
|
||||
for _, pkg := range packages {
|
||||
result = append(result, *pkg)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
Reference in New Issue
Block a user