package main

import (
	"bufio"
	"encoding/hex"
	"fmt"
	"io"
	"log"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"sync"
	"time"

	"golang.org/x/crypto/sha3"
)

const (
	resultFilename = "results.log"
	dbFilename     = "results.db"
)

type fileInfo struct {
	path      string
	name      string
	modTime   time.Time
	isDir     bool
	processed bool
}

func main() {
	var wg sync.WaitGroup
	results := make(chan fileInfo)

	// Parse command line arguments (accepted in any order).
	var driveLetter string
	var excludePatterns []string
	var initFlag bool

	for i := 1; i < len(os.Args); i++ {
		arg := os.Args[i]
		switch arg {
		case "-exclude":
			if i+1 < len(os.Args) {
				excludePatterns = append(excludePatterns, os.Args[i+1])
				i++
			}
		case "-init", "--init":
			initFlag = true
		case "-help", "--help", "-h", "--h", "/?":
			printUsage()
		default:
			if driveLetter == "" {
				driveLetter = arg
			} else {
				fmt.Printf("Unknown argument: %s\n", arg)
				os.Exit(1)
			}
		}
	}

	if driveLetter == "" {
		printUsage()
	}

	// Start the initial worker.
	wg.Add(1)
	go scanDir(driveLetter, &wg, results, excludePatterns)

	// Start the logger.
	logFile, err := os.OpenFile(resultFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
	if err != nil {
		log.Fatalf("failed to open log file: %v", err)
	}
	defer logFile.Close()
	logger := log.New(logFile, "", log.LstdFlags)

	// Listen for results and log them. The done channel signals that the
	// consumer goroutine has drained the results channel, so the counters
	// and the log file are complete before they are used below.
	counts := make(map[string]int)
	fileCount := 0
	done := make(chan struct{})
	go func() {
		defer close(done)
		for res := range results {
			fileCount++
			day := res.modTime.Format("2006-01-02")
			minute := res.modTime.Format("15:04")
			countsKey := fmt.Sprintf("%s|%s", day, minute)
			counts[countsKey]++
			logger.Printf("[%s] %s", countsKey, res.path)
			fmt.Printf("[%s] %s\n", countsKey, res.path)
		}
	}()

	// Wait for all scan workers to finish (scanDir spawns its own workers
	// for subdirectories), then close the results channel and wait for the
	// consumer goroutine to drain it.
	wg.Wait()
	close(results)
	<-done

	// Sort the logfile by the last modified time.
	if err := sortLogFileAfterWorkers(resultFilename); err != nil {
		log.Fatalf("failed to sort logfile: %v", err)
	}

	if err := generateChecksums(resultFilename, initFlag); err != nil {
		log.Fatalf("failed to generate checksums: %v", err)
	}

	//logger.Printf("Total files scanned: %d\n", fileCount)
	fmt.Printf("Total files scanned: %d\n", fileCount)

	if _, err := os.Stat(resultFilename); os.IsNotExist(err) {
		return
	}
	if _, err := os.Stat(dbFilename); os.IsNotExist(err) {
		return
	}

	// Compare the fresh scan against the stored database.
	lines, err := compareFiles(resultFilename, dbFilename)
	if err != nil {
		fmt.Printf("Error: %v\n", err)
		return
	}
	for _, line := range lines {
		fmt.Println(line)
	}

	missingFiles, err := checkFilesExistence(dbFilename)
	if err != nil {
		fmt.Println("Error occurred:", err)
		return
	}
	if len(missingFiles) > 0 {
		fmt.Println("\nWARNING: The following files do not exist:")
		for _, filename := range missingFiles {
			fmt.Println(filename)
		}
	}
}
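// Layout of a line in results.log / results.db after generateChecksums has
// run (illustrative example; the leading timestamp comes from log.LstdFlags
// and the bracketed key is the file's modification day|minute):
//
//	2023/04/01 12:34:56 [2023-03-31|18:22] [<64 hex chars of SHA3-256>] C:\data\report.docx
//
// compareFiles strips the first two columns of the database lines, and
// checkFilesExistence treats everything after the fourth column as the path,
// so both depend on this column order.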
func compareFiles(resultFilename, dbFilename string) ([]string, error) {
	// Read the content of resultFilename into tempList.
	tempList, err := os.ReadFile(resultFilename)
	if err != nil {
		return nil, err
	}

	// Read the content of dbFilename into dbtempList.
	dbtempList, err := os.ReadFile(dbFilename)
	if err != nil {
		return nil, err
	}

	// Split each line of dbtempList into columns and drop the first two
	// columns (the log timestamp), keeping key, checksum and path.
	var dbLines []string
	for _, line := range strings.Split(string(dbtempList), "\n") {
		cols := strings.Fields(line)
		if len(cols) > 2 {
			dbLines = append(dbLines, strings.Join(cols[2:], " "))
		}
	}

	// Remove lines from tempList that match any line in dbLines or that
	// refer to the result/database files themselves.
	var uniqueLines []string
	for _, line := range strings.Split(string(tempList), "\n") {
		if containsFilename(line, resultFilename) || containsFilename(line, dbFilename) {
			continue
		}
		found := false
		for _, dbLine := range dbLines {
			if strings.Contains(line, dbLine) {
				found = true
				break
			}
		}
		if !found {
			uniqueLines = append(uniqueLines, line)
		}
	}

	// Only print the warning if uniqueLines contains at least one non-empty line.
	hasNonEmptyLines := false
	for _, line := range uniqueLines {
		if strings.TrimSpace(line) != "" {
			hasNonEmptyLines = true
			break
		}
	}
	if hasNonEmptyLines {
		fmt.Println("\n\nWARNING: Suspicious lines found!!!")
	}

	return uniqueLines, nil
}

func containsFilename(line, filename string) bool {
	return strings.Contains(line, filename)
}

func checkFilesExistence(dbFilename string) ([]string, error) {
	// Open the db file for reading.
	dbFile, err := os.Open(dbFilename)
	if err != nil {
		return nil, err
	}
	defer dbFile.Close()

	// Scan the db file line by line and check whether the file named in the
	// last column still exists.
	var missingFiles []string
	scanner := bufio.NewScanner(dbFile)
	for scanner.Scan() {
		line := scanner.Text()
		fields := strings.Fields(line)
		if len(fields) > 4 {
			filename := strings.Join(fields[4:], " ")
			if _, err := os.Stat(filename); os.IsNotExist(err) {
				missingFiles = append(missingFiles, filename)
			}
		}
	}
	return missingFiles, scanner.Err()
}

func scanDir(dir string, wg *sync.WaitGroup, results chan fileInfo, excludePatterns []string) {
	defer wg.Done()

	files, err := os.ReadDir(dir)
	if err != nil {
		log.Printf("failed to read directory: %v", err)
		return
	}

	for _, file := range files {
		path := filepath.Join(dir, file.Name())

		// Check if the file matches any of the exclude patterns.
		excluded := false
		for _, pattern := range excludePatterns {
			match, err := filepath.Match(pattern, file.Name())
			if err != nil {
				log.Printf("failed to match exclude pattern: %v", err)
				continue
			}
			if match {
				excluded = true
				break
			}
		}
		if excluded {
			continue
		}

		// Recurse into subdirectories with a new worker.
		if file.IsDir() {
			wg.Add(1)
			go scanDir(path, wg, results, excludePatterns)
			continue
		}

		info, err := file.Info()
		if err != nil {
			log.Printf("failed to read file info: %v", err)
			continue
		}

		results <- fileInfo{
			path:      path,
			name:      file.Name(),
			modTime:   info.ModTime(),
			isDir:     false,
			processed: false,
		}
	}
}

func sortLogFileAfterWorkers(logFile string) error {
	// Read the log file into memory.
	content, err := os.ReadFile(logFile)
	if err != nil {
		return err
	}

	// Split the content into lines.
	lines := strings.Split(string(content), "\n")
	if len(lines) == 0 {
		return nil
	}

	// Group the lines by the bracketed "date|minute" key.
	groups := make(map[string][]string)
	for _, line := range lines {
		if len(line) == 0 {
			continue
		}
		pi := strings.Index(line, "[")
		if pi < 0 || pi+1+16 > len(line) {
			continue
		}
		datetime := line[pi+1 : pi+1+16]
		groups[datetime] = append(groups[datetime], line)
	}

	// Keep only minutes with more than one modified file; drop groups that
	// contain a single entry.
	var filtered []string
	for _, group := range groups {
		if len(group) == 1 {
			continue
		}
		filtered = append(filtered, group...)
	}

	// Sort the filtered lines by date and time.
	sort.Slice(filtered, func(i, j int) bool {
		pi := strings.Index(filtered[i], "[")
		pj := strings.Index(filtered[j], "[")
		if pi < 0 || pj < 0 {
			return false
		}
		ti, err := time.Parse("2006-01-02|15:04", filtered[i][pi+1:pi+1+16])
		if err != nil {
			return false
		}
		tj, err := time.Parse("2006-01-02|15:04", filtered[j][pj+1:pj+1+16])
		if err != nil {
			return false
		}
		return ti.Before(tj)
	})

	// Write the sorted lines back to the log file.
	return os.WriteFile(logFile, []byte(strings.Join(filtered, "\n")), 0644)
}
func generateChecksums(logFile string, initFlag bool) error {
	// Read the log file into memory.
	content, err := os.ReadFile(logFile)
	if err != nil {
		return err
	}

	// Split the content into lines and drop empty ones.
	lines := strings.Split(string(content), "\n")
	cleanedLines := make([]string, 0, len(lines))
	for _, line := range lines {
		if len(line) > 0 {
			cleanedLines = append(cleanedLines, line)
		}
	}

	// Sort the lines by the leading log timestamp ("2006/01/02 15:04:05").
	sort.Slice(cleanedLines, func(i, j int) bool {
		return cleanedLines[i][:19] < cleanedLines[j][:19]
	})

	// Group the lines by date so each unique date gets its own worker.
	groups := make(map[string][]string)
	for _, line := range cleanedLines {
		date := line[:10]
		groups[date] = append(groups[date], line)
	}
	numWorkers := len(groups)

	// Use a wait group to synchronize the workers and a buffered channel to
	// report errors back from them.
	var wg sync.WaitGroup
	wg.Add(numWorkers)
	errc := make(chan error, numWorkers)

	// Create a temporary file for writing checksums. Writes from the workers
	// are serialized with a mutex.
	tempFile, err := os.CreateTemp("", "checksums")
	if err != nil {
		return err
	}
	defer func() {
		tempFile.Close()
		os.Remove(tempFile.Name())
	}()
	var writeMu sync.Mutex

	// Process each group of lines with a separate worker.
	for _, group := range groups {
		go func(group []string) {
			defer wg.Done()
			for _, line := range group {
				// Extract the path from the line (everything after "] ").
				pathStart := strings.Index(line, "] ") + 2
				if pathStart < 2 {
					continue
				}
				path := line[pathStart:]

				// Calculate the checksum for the file.
				checksum, err := calculateChecksum(path)
				if err != nil {
					errc <- err
					return
				}

				// Insert the checksum between the key and the path.
				lineWithChecksum := fmt.Sprintf("%s [%s] %s", line[:pathStart-1], checksum, path)

				// Write the line with checksum to the temporary file.
				writeMu.Lock()
				_, err = tempFile.WriteString(lineWithChecksum + "\n")
				writeMu.Unlock()
				if err != nil {
					errc <- err
					return
				}
			}
		}(group)
	}

	// Close the error channel once all workers are done.
	go func() {
		wg.Wait()
		close(errc)
	}()

	// Check for errors from the workers.
	for err := range errc {
		if err != nil {
			return err
		}
	}

	// Close the temporary file before reading it back.
	if err := tempFile.Close(); err != nil {
		return err
	}

	// Read the content of the temporary file.
	tempContent, err := os.ReadFile(tempFile.Name())
	if err != nil {
		return err
	}

	// Write the content back to the log file.
	if err := os.WriteFile(logFile, tempContent, 0644); err != nil {
		return err
	}

	// With -init, also write the content to the database file.
	if initFlag {
		if err := os.WriteFile(dbFilename, tempContent, 0644); err != nil {
			return err
		}
	}

	return nil
}
func calculateChecksum(path string) (string, error) {
	fileInfo, err := os.Stat(path)
	if os.IsNotExist(err) {
		return "not_exist", nil
	}
	if err != nil {
		return "not_exist", err
	}
	if !fileInfo.Mode().IsRegular() {
		// Not a regular file; don't calculate a checksum.
		return "no_file", nil
	}

	file, err := os.Open(path)
	if err != nil {
		// Treat "permission denied" as a locked file and continue with the next one.
		if os.IsPermission(err) {
			return "locked", nil
		}
		return "no_access", err
	}
	defer file.Close()

	hash := sha3.New256()
	if _, err := io.Copy(hash, file); err != nil {
		return "error_checksum", err
	}

	checksum := hex.EncodeToString(hash.Sum(nil))
	fmt.Printf("Checksum for %s: %s\n", path, checksum)
	os.Stdout.Sync()
	return checksum, nil
}

func printUsage() {
	fmt.Println("\nUsage: ransch [drive_letter] [-exclude pattern] [-init] [-help|--help|-h|--h|/?]")
	fmt.Println("  -exclude: exclude a directory or file pattern (may be repeated)")
	fmt.Println("  -init:    generate the initial database file after scanning")
	os.Exit(0) // exit with status code 0 (success)
}
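// Example invocations (illustrative, assuming the built binary is named
// "ransch" as in the usage text above):
//
//	ransch D:\                           scan drive D: and write results.log
//	ransch D:\ -exclude *.iso -init      scan, skip *.iso files, and create results.db
//
// Subsequent runs compare the new scan against results.db and report
// suspicious lines and files that no longer exist.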