Compare commits


1 Commit

Author: Christian Zangl
SHA1: 737cfafe66
Message: gif
Date: 2024-01-22 11:47:17 +01:00
31 changed files with 0 additions and 2109 deletions

@@ -1,16 +0,0 @@
# EditorConfig helps developers define and maintain consistent
# coding styles between different editors and IDEs
# editorconfig.org
# top-most EditorConfig file
root = true
[*]
insert_final_newline = true
charset = utf-8
trim_trailing_whitespace = true
indent_style = space
indent_size = 4
[*.md]
indent_size = 2

1
.github/FUNDING.yml vendored
@@ -1 +0,0 @@
github: laktak

@@ -1,27 +0,0 @@
name: ci
on:
push:
branches: []
pull_request:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version: "1.22"
- name: chkfmt
run: scripts/chkfmt
- name: tests
run: |
scripts/tests
- name: xbuild
run: scripts/xbuild

@@ -1,32 +0,0 @@
name: release
on:
push:
tags: ["v*"]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version: "1.22"
- name: chkfmt
run: scripts/chkfmt
- name: tests
run: |
scripts/tests
- name: xbuild
run: version=${GITHUB_REF#$"refs/tags/v"} scripts/xbuild
- name: release
uses: softprops/action-gh-release@v2
with:
draft: true
files: dist/*

3
.gitignore vendored
@@ -1,3 +0,0 @@
# bin
/chkbit
dist

22
LICENSE
@@ -1,22 +0,0 @@
The MIT License (MIT)
Copyright (c) 2014 Christian Zangl
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

267
README.md
@@ -1,267 +0,0 @@
# chkbit
chkbit is a tool that ensures the safety of your files by checking if their *data integrity remains intact over time*, especially during transfers and backups. It helps detect issues like disk damage, filesystem errors, and malware interference.
![gif of chkbit](https://raw.githubusercontent.com/wiki/laktak/chkbit/readme/chkbit.gif "chkbit")
- [How it works](#how-it-works)
- [Installation](#installation)
- [Usage](#usage)
- [Repair](#repair)
- [Ignore files](#ignore-files)
- [chkbit as a Go module](#chkbit-as-a-go-module)
- [FAQ](#faq)
## How it works
- **On your Disk**: chkbit starts by creating checksums for each folder on your main disk. It alerts you to potential problems such as damage on the disk, filesystem errors, and malware attacks that could alter your files.
- **On your Backup**: Regardless of your storage media, chkbit stores indexes in hidden files alongside your data during backups. When you run chkbit on your backup, it verifies that every byte was accurately transferred. If issues like [bitrot/data degradation](https://en.wikipedia.org/wiki/Data_degradation) occur, chkbit helps identify damaged files, alerting you to replace them with other backups.
- **For Data in the Cloud**: chkbit is useful for cloud-stored data, alerting you to any changes introduced by cloud providers like video re-encoding or image compression. It ensures your files remain unchanged in the cloud.
Remember to always maintain multiple backups for comprehensive data protection.
## Installation
### Binary releases
You can download the official chkbit binaries from the releases page and place them in your `PATH`.
- https://github.com/laktak/chkbit/releases
### Homebrew (macOS and Linux)
For macOS and Linux it can also be installed via [Homebrew](https://formulae.brew.sh/formula/chkbit):
```shell
brew install chkbit
```
### Build from Source
Building from the source requires Go.
- Either install it directly
```shell
go install github.com/laktak/chkbit/v5/cmd/chkbit@latest
```
- or clone and build
```shell
git clone https://github.com/laktak/chkbit
chkbit/scripts/build
# binary:
ls -l chkbit/chkbit
```
## Usage
Run `chkbit -u PATH` to create/update the chkbit index.
chkbit will
- create a `.chkbit` index in every subdirectory of the path it was given.
- update the index with blake3 (see --algo) hashes for every file.
- report damage for files that failed the integrity check since the last run (check the exit status).
Run `chkbit PATH` to verify only.
```
Usage: chkbit [<paths> ...] [flags]
Arguments:
[<paths> ...] directories to check
Flags:
-h, --help Show context-sensitive help.
-H, --tips Show tips.
-c, --check check mode: chkbit will verify files in readonly mode (default mode)
-u, --update update mode: add and update indices
-a, --add-only add mode: only add new files, do not check existing (quicker)
-i, --show-ignored-only show-ignored mode: only show ignored files
-m, --show-missing show missing files/directories
--force force update of damaged items (advanced usage only)
-S, --skip-symlinks do not follow symlinks
-R, --no-recurse do not recurse into subdirectories
-D, --no-dir-in-index do not track directories in the index
-l, --log-file=STRING write to a logfile if specified
--log-verbose verbose logging
--algo="blake3" hash algorithm: md5, sha512, blake3 (default: blake3)
--index-name=".chkbit" filename where chkbit stores its hashes, needs to start with '.' (default: .chkbit)
--ignore-name=".chkbitignore" filename that chkbit reads its ignore list from, needs to start with '.' (default: .chkbitignore)
-w, --workers=5 number of workers to use (default: 5)
--plain show plain status instead of being fancy
-q, --quiet quiet, don't show progress/information
-v, --verbose verbose output
-V, --version show version information
```
```
$ chkbit -H
.chkbitignore rules:
each line should contain exactly one name
you may use Unix shell-style wildcards (see README)
lines starting with '#' are skipped
lines starting with '/' are only applied to the current directory
Status codes:
DMG: error, data damage detected
EIX: error, index damaged
old: warning, file replaced by an older version
new: new file
upd: file updated
ok : check ok
del: file/directory removed
ign: ignored (see .chkbitignore)
EXC: exception/panic
```
chkbit is set to use only 5 workers by default so it will not slow your system to a crawl. You can specify a higher number to make it a lot faster if the I/O throughput can keep up.
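For example, on a fast disk you might raise the worker count (the path is a placeholder):
```shell
chkbit -u -w 16 PATH
```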
## Repair
chkbit is designed to detect "damage". To repair your files you need to think ahead (see the example after this list):
- backup regularly
- run chkbit *before* each backup
- run chkbit *after* a backup on the backup media (readonly)
- in case of any issues, *restore* from a checked backup medium.
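For example, with placeholder paths, that routine could look like this:
```shell
# before the backup: create/update the index next to your data
chkbit -u ~/pictures

# ... copy the data together with the hidden .chkbit files to the backup ...

# after the backup: verify the copy in readonly mode
chkbit /mnt/backup/pictures
```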
## Ignore files
Add a `.chkbitignore` file containing the names of the files/directories you wish to ignore (see the example after this list):
- each line should contain exactly one name
- you may use Unix shell-style wildcards
- `*` matches everything
- `?` matches any single character
- `[seq]` matches any character in seq
- `[!seq]` matches any character not in seq
- lines starting with `#` are skipped
- lines starting with `/` are only applied to the current directory
- you can use `path/sub/name` to ignore a file/directory in a sub path
- hidden files (starting with a `.`) are ignored by default
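For example, a `.chkbitignore` with these (hypothetical) entries skips any `.bak` file, a `cache` directory only in the current folder, and one file in a sub path:
```
# backup copies
*.bak
/cache
raw/originals/export.tmp
```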
## chkbit as a Go module
chkbit can also be used in other Go programs.
```
go get github.com/laktak/chkbit/v5
```
For more information see the documentation on [pkg.go.dev](https://pkg.go.dev/github.com/laktak/chkbit/v5).
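As a minimal sketch, here is a read-only check of the current directory using the exported `Context` API shown in the package code further down in this diff (`NewContext`, `Start`, `LogQueue`, `PerfQueue`); the worker count and path are placeholders:
```go
package main

import (
	"fmt"

	"github.com/laktak/chkbit/v5"
)

func main() {
	// 5 workers, blake3, default index/ignore filenames
	ctx, err := chkbit.NewContext(5, "blake3", ".chkbit", ".chkbitignore")
	if err != nil {
		panic(err)
	}
	done := make(chan struct{})
	go func() {
		defer close(done)
		for {
			select {
			case item := <-ctx.LogQueue:
				if item == nil {
					return // Start signals completion with a nil event
				}
				fmt.Println(item.Stat.String(), item.Message)
			case <-ctx.PerfQueue:
				// progress events are dropped in this sketch
			}
		}
	}()
	ctx.Start([]string{"."}) // UpdateIndex stays false, so this is a readonly check
	<-done
	fmt.Println("files processed:", ctx.NumTotal)
}
```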
## FAQ
### Should I run `chkbit` on my whole drive?
You would typically run it only on *content* that you keep for a long time (e.g. your pictures, music, videos).
### Why is chkbit placing the index in `.chkbit` files (vs a database)?
The advantages of the `.chkbit` files are that
- when you move a directory the index moves with it
- when you make a backup the index is also backed up
The disadvantage is obviously that you get hidden `.chkbit` files in your content folders.
### How does chkbit work?
chkbit operates on files.
When run for the first time it records a hash of the file contents as well as the file modification time.
When you run it again it first checks the modification time,
- if the time changed (because you made an edit) it records a new hash.
- otherwise it will compare the current hash to the recorded value and report an error if they do not match.
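A toy sketch of the decision described above (the real logic is `checkFix` in `index.go` further down in this diff; the function name `decide` and the literal status strings are illustrative only):
```go
package main

import "fmt"

// decide mirrors the rule described above: a changed modification time means
// the file was edited, an unchanged time with a different hash means damage.
func decide(recordedMod, currentMod int64, recordedHash, currentHash string) string {
	if recordedMod != currentMod {
		return "upd" // record the new hash
	}
	if recordedHash != currentHash {
		return "DMG" // report damage
	}
	return "ok"
}

func main() {
	fmt.Println(decide(1000, 1000, "abc", "xyz")) // DMG
}
```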
### I wish to use a different hash algorithm
chkbit now uses blake3 by default. You can also specify `--algo sha512` or `--algo md5`.
Note that existing index files will use the hash that they were created with. If you wish to update all hashes you need to delete your existing indexes first. A conversion mode may be added later (PR welcome).
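For example, to record new hashes with SHA-512 (placeholder path):
```shell
chkbit -u --algo sha512 PATH
```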
### How can I delete the index files?
List them with
```
find . -name .chkbit
```
and add `-delete` to delete.
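That is:
```
find . -name .chkbit -delete
```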
### Can I test if chkbit is working correctly?
On Linux/macOS you can try:
Create a test file and set its modified time:
```
$ echo foo1 > test; touch -t 201501010000 test
$ chkbit -u .
new ./test
Processed 1 file.
- 0:00:00 elapsed
- 192.31 files/second
- 0.00 MB/second
- 1 directory was updated
- 1 file hash was added
- 0 file hashes were updated
```
`new` indicates a new file was added.
Now update the test file with a new modified time:
```
$ echo foo2 > test; touch -t 201501010001 test # update test & modified
$ chkbit -u .
upd ./test
Processed 1 file.
- 0:00:00 elapsed
- 191.61 files/second
- 0.00 MB/second
- 1 directory was updated
- 0 file hashes were added
- 1 file hash was updated
```
`upd` indicates the file was updated.
Now update the test file with the same modified time to simulate damage:
```
$ echo foo3 > test; touch -t 201501010001 test
$ chkbit -u .
DMG ./test
Processed 1 file.
- 0:00:00 elapsed
- 173.93 files/second
- 0.00 MB/second
chkbit detected damage in these files:
./test
error: detected 1 file with damage!
```
`DMG` indicates damage.

@@ -1,25 +0,0 @@
package main
var headerHelp = `Checks the data integrity of your files.
For help tips run "chkbit -H" or go to
https://github.com/laktak/chkbit
`
var helpTips = `
.chkbitignore rules:
each line should contain exactly one name
you may use Unix shell-style wildcards (see README)
lines starting with '#' are skipped
lines starting with '/' are only applied to the current directory
Status codes:
DMG: error, data damage detected
EIX: error, index damaged
old: warning, file replaced by an older version
new: new file
upd: file updated
ok : check ok
del: file/directory removed
ign: ignored (see .chkbitignore)
EXC: exception/panic
`

@@ -1,351 +0,0 @@
package main
import (
"fmt"
"io"
"log"
"os"
"strings"
"sync"
"time"
"github.com/alecthomas/kong"
"github.com/laktak/chkbit/v5"
"github.com/laktak/chkbit/v5/cmd/chkbit/util"
"github.com/laktak/lterm"
)
type Progress int
const (
Quiet Progress = iota
Summary
Plain
Fancy
)
const (
updateInterval = time.Millisecond * 700
sizeMB int64 = 1024 * 1024
)
var appVersion = "vdev"
var (
termBG = lterm.Bg8(240)
termSep = "|"
termSepFG = lterm.Fg8(235)
termFG1 = lterm.Fg8(255)
termFG2 = lterm.Fg8(228)
termFG3 = lterm.Fg8(202)
termOKFG = lterm.Fg4(2)
termAlertFG = lterm.Fg4(1)
)
var cli struct {
Paths []string `arg:"" optional:"" name:"paths" help:"directories to check"`
Tips bool `short:"H" help:"Show tips."`
Check bool `short:"c" help:"check mode: chkbit will verify files in readonly mode (default mode)"`
Update bool `short:"u" help:"update mode: add and update indices"`
AddOnly bool `short:"a" help:"add mode: only add new files, do not check existing (quicker)"`
ShowIgnoredOnly bool `short:"i" help:"show-ignored mode: only show ignored files"`
ShowMissing bool `short:"m" help:"show missing files/directories"`
Force bool `help:"force update of damaged items (advanced usage only)"`
SkipSymlinks bool `short:"S" help:"do not follow symlinks"`
NoRecurse bool `short:"R" help:"do not recurse into subdirectories"`
NoDirInIndex bool `short:"D" help:"do not track directories in the index"`
LogFile string `short:"l" help:"write to a logfile if specified"`
LogVerbose bool `help:"verbose logging"`
Algo string `default:"blake3" help:"hash algorithm: md5, sha512, blake3 (default: blake3)"`
IndexName string `default:".chkbit" help:"filename where chkbit stores its hashes, needs to start with '.' (default: .chkbit)"`
IgnoreName string `default:".chkbitignore" help:"filename that chkbit reads its ignore list from, needs to start with '.' (default: .chkbitignore)"`
Workers int `short:"w" default:"5" help:"number of workers to use (default: 5)"`
Plain bool `help:"show plain status instead of being fancy"`
Quiet bool `short:"q" help:"quiet, don't show progress/information"`
Verbose bool `short:"v" help:"verbose output"`
Version bool `short:"V" help:"show version information"`
}
type Main struct {
context *chkbit.Context
dmgList []string
errList []string
verbose bool
logger *log.Logger
logVerbose bool
progress Progress
termWidth int
fps *util.RateCalc
bps *util.RateCalc
}
func (m *Main) log(text string) {
m.logger.Println(time.Now().UTC().Format("2006-01-02 15:04:05"), text)
}
func (m *Main) logStatus(stat chkbit.Status, message string) bool {
if stat == chkbit.STATUS_UPDATE_INDEX {
return false
}
if stat == chkbit.STATUS_ERR_DMG {
m.dmgList = append(m.dmgList, message)
} else if stat == chkbit.STATUS_PANIC {
m.errList = append(m.errList, message)
}
if m.logVerbose || !stat.IsVerbose() {
m.log(stat.String() + " " + message)
}
if m.verbose || !stat.IsVerbose() {
col := ""
if stat.IsErrorOrWarning() {
col = termAlertFG
}
lterm.Printline(col, stat.String(), " ", message, lterm.Reset)
return true
}
return false
}
func (m *Main) showStatus() {
last := time.Now().Add(-updateInterval)
stat := ""
for {
select {
case item := <-m.context.LogQueue:
if item == nil {
if m.progress == Fancy {
lterm.Printline("")
}
return
}
if m.logStatus(item.Stat, item.Message) {
if m.progress == Fancy {
lterm.Write(termBG, termFG1, stat, lterm.ClearLine(0), lterm.Reset, "\r")
} else {
fmt.Print(m.context.NumTotal, "\r")
}
}
case perf := <-m.context.PerfQueue:
now := time.Now()
m.fps.Push(now, perf.NumFiles)
m.bps.Push(now, perf.NumBytes)
if last.Add(updateInterval).Before(now) {
last = now
if m.progress == Fancy {
statF := fmt.Sprintf("%d files/s", m.fps.Last())
statB := fmt.Sprintf("%d MB/s", m.bps.Last()/sizeMB)
stat = "RW"
if !m.context.UpdateIndex {
stat = "RO"
}
stat = fmt.Sprintf("[%s:%d] %5d files $ %s %-13s $ %s %-13s",
stat, m.context.NumWorkers, m.context.NumTotal,
util.Sparkline(m.fps.Stats), statF,
util.Sparkline(m.bps.Stats), statB)
stat = util.LeftTruncate(stat, m.termWidth-1)
stat = strings.Replace(stat, "$", termSepFG+termSep+termFG2, 1)
stat = strings.Replace(stat, "$", termSepFG+termSep+termFG3, 1)
lterm.Write(termBG, termFG1, stat, lterm.ClearLine(0), lterm.Reset, "\r")
} else if m.progress == Plain {
fmt.Print(m.context.NumTotal, "\r")
}
}
}
}
}
func (m *Main) process() bool {
// verify mode
var b01 = map[bool]int8{false: 0, true: 1}
if b01[cli.Check]+b01[cli.Update]+b01[cli.AddOnly]+b01[cli.ShowIgnoredOnly] > 1 {
fmt.Println("Error: can only run one mode at a time!")
os.Exit(1)
}
var err error
m.context, err = chkbit.NewContext(cli.Workers, cli.Algo, cli.IndexName, cli.IgnoreName)
if err != nil {
fmt.Println(err)
return false
}
m.context.ForceUpdateDmg = cli.Force
m.context.UpdateIndex = cli.Update || cli.AddOnly
m.context.AddOnly = cli.AddOnly
m.context.ShowIgnoredOnly = cli.ShowIgnoredOnly
m.context.ShowMissing = cli.ShowMissing
m.context.SkipSymlinks = cli.SkipSymlinks
m.context.SkipSubdirectories = cli.NoRecurse
m.context.TrackDirectories = !cli.NoDirInIndex
var wg sync.WaitGroup
wg.Add(1)
go func() {
defer wg.Done()
m.showStatus()
}()
m.context.Start(cli.Paths)
wg.Wait()
return true
}
func (m *Main) printResult() {
cprint := func(col, text string) {
if m.progress != Quiet {
if m.progress == Fancy {
lterm.Printline(col, text, lterm.Reset)
} else {
fmt.Println(text)
}
}
}
eprint := func(col, text string) {
if m.progress == Fancy {
lterm.Write(col)
fmt.Fprintln(os.Stderr, text)
lterm.Write(lterm.Reset)
} else {
fmt.Fprintln(os.Stderr, text)
}
}
if m.progress != Quiet {
mode := ""
if !m.context.UpdateIndex {
mode = " in readonly mode"
}
status := fmt.Sprintf("Processed %s%s.", util.LangNum1MutateSuffix(m.context.NumTotal, "file"), mode)
cprint(termOKFG, status)
m.log(status)
if m.progress == Fancy && m.context.NumTotal > 0 {
elapsed := time.Since(m.fps.Start)
elapsedS := elapsed.Seconds()
fmt.Println("-", elapsed.Truncate(time.Second), "elapsed")
fmt.Printf("- %.2f files/second\n", (float64(m.fps.Total)+float64(m.fps.Current))/elapsedS)
fmt.Printf("- %.2f MB/second\n", (float64(m.bps.Total)+float64(m.bps.Current))/float64(sizeMB)/elapsedS)
}
del := ""
if m.context.UpdateIndex {
if m.context.NumIdxUpd > 0 {
if m.context.NumDel > 0 {
del = fmt.Sprintf("\n- %s been removed", util.LangNum1Choice(m.context.NumDel, "file/directory has", "files/directories have"))
}
cprint(termOKFG, fmt.Sprintf("- %s updated\n- %s added\n- %s updated%s",
util.LangNum1Choice(m.context.NumIdxUpd, "directory was", "directories were"),
util.LangNum1Choice(m.context.NumNew, "file hash was", "file hashes were"),
util.LangNum1Choice(m.context.NumUpd, "file hash was", "file hashes were"),
del))
}
} else if m.context.NumNew+m.context.NumUpd+m.context.NumDel > 0 {
if m.context.NumDel > 0 {
del = fmt.Sprintf("\n- %s would have been removed", util.LangNum1Choice(m.context.NumDel, "file/directory", "files/directories"))
}
cprint(termAlertFG, fmt.Sprintf("No changes were made (specify -u to update):\n- %s would have been added\n- %s would have been updated%s",
util.LangNum1MutateSuffix(m.context.NumNew, "file"),
util.LangNum1MutateSuffix(m.context.NumUpd, "file"),
del))
}
}
if len(m.dmgList) > 0 {
eprint(termAlertFG, "chkbit detected damage in these files:")
for _, err := range m.dmgList {
fmt.Fprintln(os.Stderr, err)
}
n := len(m.dmgList)
status := fmt.Sprintf("error: detected %s with damage!", util.LangNum1MutateSuffix(n, "file"))
m.log(status)
eprint(termAlertFG, status)
}
if len(m.errList) > 0 {
status := "chkbit ran into errors"
m.log(status + "!")
eprint(termAlertFG, status+":")
for _, err := range m.errList {
fmt.Fprintln(os.Stderr, err)
}
}
if len(m.dmgList) > 0 || len(m.errList) > 0 {
os.Exit(1)
}
}
func (m *Main) run() {
if len(os.Args) < 2 {
os.Args = append(os.Args, "--help")
}
kong.Parse(&cli,
kong.Name("chkbit"),
kong.Description(""),
kong.UsageOnError(),
)
if cli.Tips {
fmt.Println(helpTips)
os.Exit(0)
}
if cli.Version {
fmt.Println("github.com/laktak/chkbit")
fmt.Println(appVersion)
return
}
m.verbose = cli.Verbose || cli.ShowIgnoredOnly
if cli.LogFile != "" {
m.logVerbose = cli.LogVerbose
f, err := os.OpenFile(cli.LogFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
if err != nil {
fmt.Println(err)
return
}
defer f.Close()
m.logger = log.New(f, "", 0)
}
if cli.Quiet {
m.progress = Quiet
} else if fileInfo, _ := os.Stdout.Stat(); (fileInfo.Mode() & os.ModeCharDevice) == 0 {
m.progress = Summary
} else if cli.Plain {
m.progress = Plain
} else {
m.progress = Fancy
}
if len(cli.Paths) > 0 {
m.log("chkbit " + strings.Join(cli.Paths, ", "))
if m.process() && !m.context.ShowIgnoredOnly {
m.printResult()
}
} else {
fmt.Println("specify a path to check, see -h")
}
}
func main() {
defer func() {
if r := recover(); r != nil {
fmt.Println(r)
os.Exit(1)
}
}()
termWidth := lterm.GetWidth()
m := &Main{
logger: log.New(io.Discard, "", 0),
termWidth: termWidth,
fps: util.NewRateCalc(time.Second, (termWidth-70)/2),
bps: util.NewRateCalc(time.Second, (termWidth-70)/2),
}
m.run()
}

@@ -1,35 +0,0 @@
package util
import (
"math"
)
func Minimum(series []int64) int64 {
var min int64 = math.MaxInt64
for _, value := range series {
if value < min {
min = value
}
}
return min
}
func Maximum(series []int64) int64 {
var max int64 = math.MinInt64
for _, value := range series {
if value > max {
max = value
}
}
return max
}
func Clamp(min int64, max int64, n int64) int64 {
if n < min {
return min
}
if n > max {
return max
}
return n
}

@@ -1,19 +0,0 @@
package util
import "fmt"
func LangNum1MutateSuffix(num int, u string) string {
s := ""
if num != 1 {
s = "s"
}
return fmt.Sprintf("%d %s%s", num, u, s)
}
func LangNum1Choice(num int, u1, u2 string) string {
u := u1
if num != 1 {
u = u2
}
return fmt.Sprintf("%d %s", num, u)
}

@@ -1,52 +0,0 @@
package util
import (
"time"
)
type RateCalc struct {
Interval time.Duration
MaxStat int
Start time.Time
Updated time.Time
Total int64
Current int64
Stats []int64
}
func NewRateCalc(interval time.Duration, maxStat int) *RateCalc {
if maxStat < 10 {
maxStat = 10
}
rc := &RateCalc{
Interval: interval,
MaxStat: maxStat,
}
rc.Reset()
return rc
}
func (rc *RateCalc) Reset() {
rc.Start = time.Now()
rc.Updated = rc.Start
rc.Total = 0
rc.Current = 0
rc.Stats = make([]int64, rc.MaxStat)
}
func (rc *RateCalc) Last() int64 {
return rc.Stats[len(rc.Stats)-1]
}
func (rc *RateCalc) Push(ts time.Time, value int64) {
for rc.Updated.Add(rc.Interval).Before(ts) {
rc.Stats = append(rc.Stats, rc.Current)
if len(rc.Stats) > rc.MaxStat {
rc.Stats = rc.Stats[len(rc.Stats)-rc.MaxStat:]
}
rc.Total += rc.Current
rc.Current = 0
rc.Updated = rc.Updated.Add(rc.Interval)
}
rc.Current += value
}

@@ -1,32 +0,0 @@
package util
import (
"math"
)
var sparkChars = []rune{'▁', '▂', '▃', '▄', '▅', '▆', '▇', '█'}
func Sparkline(series []int64) string {
out := make([]rune, len(series))
min := Minimum(series)
max := Maximum(series)
dataRange := max - min
if dataRange == 0 {
for i := range series {
out[i] = sparkChars[0]
}
} else {
step := float64(len(sparkChars)-1) / float64(dataRange)
for i, n := range series {
idx := int(math.Round(float64(Clamp(min, max, n)-min) * step))
if idx < 0 {
out[i] = ' '
} else if idx >= len(sparkChars) {
out[i] = sparkChars[len(sparkChars)-1]
} else {
out[i] = sparkChars[idx]
}
}
}
return string(out)
}

@@ -1,13 +0,0 @@
package util
import (
"testing"
)
func TestSpark(t *testing.T) {
expected := "▁▁▂▄▅▇██▆▄▂"
actual := Sparkline([]int64{5, 12, 35, 73, 80, 125, 150, 142, 118, 61, 19})
if expected != actual {
t.Error("expected:", expected, "actual:", actual)
}
}

@@ -1,11 +0,0 @@
package util
func LeftTruncate(s string, nMax int) string {
for i := range s {
nMax--
if nMax < 0 {
return s[:i]
}
}
return s
}

@@ -1,13 +0,0 @@
package util
import (
"testing"
)
func TestTrunc(t *testing.T) {
expected := "ab©def"
actual := LeftTruncate(expected+"ghijk", 6)
if expected != actual {
t.Error("expected:", expected, "actual:", actual)
}
}

@@ -1,195 +0,0 @@
package chkbit
import (
"errors"
"os"
"path/filepath"
"sync"
)
type Context struct {
NumWorkers int
UpdateIndex bool
AddOnly bool
ShowIgnoredOnly bool
ShowMissing bool
ForceUpdateDmg bool
HashAlgo string
TrackDirectories bool
SkipSymlinks bool
SkipSubdirectories bool
IndexFilename string
IgnoreFilename string
WorkQueue chan *WorkItem
LogQueue chan *LogEvent
PerfQueue chan *PerfEvent
wg sync.WaitGroup
mutex sync.Mutex
NumTotal int
NumIdxUpd int
NumNew int
NumUpd int
NumDel int
}
func NewContext(numWorkers int, hashAlgo string, indexFilename string, ignoreFilename string) (*Context, error) {
if indexFilename[0] != '.' {
return nil, errors.New("The index filename must start with a dot!")
}
if ignoreFilename[0] != '.' {
return nil, errors.New("The ignore filename must start with a dot!")
}
if hashAlgo != "md5" && hashAlgo != "sha512" && hashAlgo != "blake3" {
return nil, errors.New(hashAlgo + " is unknown.")
}
return &Context{
NumWorkers: numWorkers,
HashAlgo: hashAlgo,
IndexFilename: indexFilename,
IgnoreFilename: ignoreFilename,
WorkQueue: make(chan *WorkItem, numWorkers*10),
LogQueue: make(chan *LogEvent, numWorkers*100),
PerfQueue: make(chan *PerfEvent, numWorkers*10),
}, nil
}
func (context *Context) log(stat Status, message string) {
context.mutex.Lock()
defer context.mutex.Unlock()
switch stat {
case STATUS_ERR_DMG:
context.NumTotal++
case STATUS_UPDATE_INDEX:
context.NumIdxUpd++
case STATUS_UP_WARN_OLD:
context.NumTotal++
context.NumUpd++
case STATUS_UPDATE:
context.NumTotal++
context.NumUpd++
case STATUS_NEW:
context.NumTotal++
context.NumNew++
case STATUS_OK:
if !context.AddOnly {
context.NumTotal++
}
case STATUS_MISSING:
context.NumDel++
//case STATUS_PANIC:
//case STATUS_ERR_IDX:
//case STATUS_IGNORE:
}
context.LogQueue <- &LogEvent{stat, message}
}
func (context *Context) logErr(path string, err error) {
context.LogQueue <- &LogEvent{STATUS_PANIC, path + ": " + err.Error()}
}
func (context *Context) perfMonFiles(numFiles int64) {
context.PerfQueue <- &PerfEvent{numFiles, 0}
}
func (context *Context) perfMonBytes(numBytes int64) {
context.PerfQueue <- &PerfEvent{0, numBytes}
}
func (context *Context) addWork(path string, filesToIndex []string, dirList []string, ignore *Ignore) {
context.WorkQueue <- &WorkItem{path, filesToIndex, dirList, ignore}
}
func (context *Context) endWork() {
context.WorkQueue <- nil
}
func (context *Context) isChkbitFile(name string) bool {
return name == context.IndexFilename || name == context.IgnoreFilename
}
func (context *Context) Start(pathList []string) {
context.NumTotal = 0
context.NumIdxUpd = 0
context.NumNew = 0
context.NumUpd = 0
context.NumDel = 0
var wg sync.WaitGroup
wg.Add(context.NumWorkers)
for i := 0; i < context.NumWorkers; i++ {
go func(id int) {
defer wg.Done()
context.runWorker(id)
}(i)
}
go func() {
for _, path := range pathList {
context.scanDir(path, nil)
}
for i := 0; i < context.NumWorkers; i++ {
context.endWork()
}
}()
wg.Wait()
context.LogQueue <- nil
}
func (context *Context) scanDir(root string, parentIgnore *Ignore) {
files, err := os.ReadDir(root)
if err != nil {
context.logErr(root+"/", err)
return
}
isDir := func(file os.DirEntry, path string) bool {
if file.IsDir() {
return true
}
ft := file.Type()
if !context.SkipSymlinks && ft&os.ModeSymlink != 0 {
rpath, err := filepath.EvalSymlinks(path)
if err == nil {
fi, err := os.Lstat(rpath)
return err == nil && fi.IsDir()
}
}
return false
}
var dirList []string
var filesToIndex []string
ignore, err := GetIgnore(context, root, parentIgnore)
if err != nil {
context.logErr(root+"/", err)
}
for _, file := range files {
path := filepath.Join(root, file.Name())
if file.Name()[0] == '.' {
if context.ShowIgnoredOnly && !context.isChkbitFile(file.Name()) {
context.log(STATUS_IGNORE, path)
}
continue
}
if isDir(file, path) {
if !ignore.shouldIgnore(file.Name()) {
dirList = append(dirList, file.Name())
} else {
context.log(STATUS_IGNORE, file.Name()+"/")
}
} else if file.Type().IsRegular() {
filesToIndex = append(filesToIndex, file.Name())
}
}
context.addWork(root, filesToIndex, dirList, ignore)
if !context.SkipSubdirectories {
for _, name := range dirList {
context.scanDir(filepath.Join(root, name), ignore)
}
}
}

15
go.mod
@@ -1,15 +0,0 @@
module github.com/laktak/chkbit/v5
go 1.22.3
require (
github.com/alecthomas/kong v0.9.0
github.com/laktak/lterm v1.0.0
lukechampine.com/blake3 v1.3.0
)
require (
github.com/klauspost/cpuid/v2 v2.0.9 // indirect
golang.org/x/sys v0.24.0 // indirect
golang.org/x/term v0.23.0 // indirect
)

18
go.sum
@@ -1,18 +0,0 @@
github.com/alecthomas/assert/v2 v2.6.0 h1:o3WJwILtexrEUk3cUVal3oiQY2tfgr/FHWiz/v2n4FU=
github.com/alecthomas/assert/v2 v2.6.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/kong v0.9.0 h1:G5diXxc85KvoV2f0ZRVuMsi45IrBgx9zDNGNj165aPA=
github.com/alecthomas/kong v0.9.0/go.mod h1:Y47y5gKfHp1hDc7CH7OeXgLIpp+Q2m1Ni0L5s3bI8Os=
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/klauspost/cpuid/v2 v2.0.9 h1:lgaqFMSdTdQYdZ04uHyN2d/eKdOMyi2YLSvlQIBFYa4=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/laktak/lterm v1.0.0 h1:hxRsBDHIWlMn+IV7C+6/tk4y81WqcO8F6G+52wSZUf4=
github.com/laktak/lterm v1.0.0/go.mod h1:zwGyZi5PNuySqsDsRVNvBBYANy9k61oYgW6Flsm2AZg=
golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg=
golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.23.0 h1:F6D4vR+EHoL9/sWAWgAR1H2DcHr4PareCbAaCo1RpuU=
golang.org/x/term v0.23.0/go.mod h1:DgV24QBUrK6jhZXl+20l6UWznPlwAHm1Q1mGHtydmSk=
lukechampine.com/blake3 v1.3.0 h1:sJ3XhFINmHSrYCgl958hscfIa3bw8x4DqMP3u1YvoYE=
lukechampine.com/blake3 v1.3.0/go.mod h1:0OFRp7fBtAylGVCO40o87sbupkyIGgbpv1+M1k1LM6k=

@@ -1,57 +0,0 @@
package chkbit
import (
"crypto/md5"
"crypto/sha512"
"encoding/hex"
"errors"
"hash"
"io"
"os"
"lukechampine.com/blake3"
)
const BLOCKSIZE = 2 << 10 << 7 // 256 KiB
func Hashfile(path string, hashAlgo string, perfMonBytes func(int64)) (string, error) {
var h hash.Hash
switch hashAlgo {
case "md5":
h = md5.New()
case "sha512":
h = sha512.New()
case "blake3":
h = blake3.New(32, nil)
default:
return "", errors.New("algo '" + hashAlgo + "' is unknown.")
}
file, err := os.Open(path)
if err != nil {
return "", err
}
defer file.Close()
buf := make([]byte, BLOCKSIZE)
for {
bytesRead, err := file.Read(buf)
if err != nil && err != io.EOF {
return "", err
}
if bytesRead == 0 {
break
}
h.Write(buf[:bytesRead])
if perfMonBytes != nil {
perfMonBytes(int64(bytesRead))
}
}
return hex.EncodeToString(h.Sum(nil)), nil
}
func hashMd5(data []byte) string {
h := md5.New()
h.Write(data)
return hex.EncodeToString(h.Sum(nil))
}

@@ -1,90 +0,0 @@
package chkbit
import (
"bufio"
"os"
"path/filepath"
"strings"
)
type Ignore struct {
parentIgnore *Ignore
context *Context
path string
name string
itemList []string
}
func GetIgnore(context *Context, path string, parentIgnore *Ignore) (*Ignore, error) {
ignore := &Ignore{
parentIgnore: parentIgnore,
context: context,
path: path,
name: filepath.Base(path) + "/",
}
err := ignore.loadIgnore()
if err != nil {
return nil, err
}
return ignore, nil
}
func (ignore *Ignore) getIgnoreFilepath() string {
return filepath.Join(ignore.path, ignore.context.IgnoreFilename)
}
func (ignore *Ignore) loadIgnore() error {
if _, err := os.Stat(ignore.getIgnoreFilepath()); err != nil {
if os.IsNotExist(err) {
return nil
}
return err
}
file, err := os.Open(ignore.getIgnoreFilepath())
if err != nil {
return err
}
defer file.Close()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
if line != "" && line[0] != '#' {
ignore.itemList = append(ignore.itemList, line)
}
}
return scanner.Err()
}
func (ignore *Ignore) shouldIgnore(name string) bool {
return ignore.shouldIgnore2(name, "")
}
func (ignore *Ignore) shouldIgnore2(name string, fullname string) bool {
for _, item := range ignore.itemList {
if item[0] == '/' {
if len(fullname) > 0 {
continue
} else {
item = item[1:]
}
}
if match, _ := filepath.Match(item, name); match {
return true
}
if fullname != "" {
if match, _ := filepath.Match(item, fullname); match {
return true
}
}
}
if ignore.parentIgnore != nil {
if fullname != "" {
return ignore.parentIgnore.shouldIgnore2(fullname, ignore.name+fullname)
} else {
return ignore.parentIgnore.shouldIgnore2(name, ignore.name+name)
}
}
return false
}

303
index.go
@@ -1,303 +0,0 @@
package chkbit
import (
"encoding/json"
"errors"
"os"
"path/filepath"
"slices"
)
const VERSION = 2 // index version
var (
algoMd5 = "md5"
)
type idxInfo struct {
ModTime int64 `json:"mod"`
Algo *string `json:"a,omitempty"`
Hash *string `json:"h,omitempty"`
LegacyHash *string `json:"md5,omitempty"`
}
type indexFile struct {
V int `json:"v"`
// IdxRaw -> map[string]idxInfo
IdxRaw json.RawMessage `json:"idx"`
IdxHash string `json:"idx_hash"`
// 2024-08 optional, list of subdirectories
Dir []string `json:"dirlist,omitempty"`
}
type idxInfo1 struct {
ModTime int64 `json:"mod"`
Hash string `json:"md5"`
}
type indexFile1 struct {
Data map[string]idxInfo1 `json:"data"`
}
type Index struct {
context *Context
path string
files []string
cur map[string]idxInfo
new map[string]idxInfo
curDirList []string
newDirList []string
modified bool
readonly bool
}
func newIndex(context *Context, path string, files []string, dirList []string, readonly bool) *Index {
slices.Sort(dirList)
return &Index{
context: context,
path: path,
files: files,
cur: make(map[string]idxInfo),
new: make(map[string]idxInfo),
curDirList: make([]string, 0),
newDirList: dirList,
readonly: readonly,
}
}
func (i *Index) getIndexFilepath() string {
return filepath.Join(i.path, i.context.IndexFilename)
}
func (i *Index) logFilePanic(name string, message string) {
i.context.log(STATUS_PANIC, filepath.Join(i.path, name)+": "+message)
}
func (i *Index) logFile(stat Status, name string) {
i.context.log(stat, filepath.Join(i.path, name))
}
func (i *Index) logDir(stat Status, name string) {
i.context.log(stat, filepath.Join(i.path, name)+"/")
}
func (i *Index) calcHashes(ignore *Ignore) {
for _, name := range i.files {
if ignore != nil && ignore.shouldIgnore(name) {
i.logFile(STATUS_IGNORE, name)
continue
}
var err error
var info *idxInfo
algo := i.context.HashAlgo
if val, ok := i.cur[name]; ok {
// existing file
if val.LegacyHash != nil {
// convert from py1 to new format
val = idxInfo{
ModTime: val.ModTime,
Algo: &algoMd5,
Hash: val.LegacyHash,
}
i.cur[name] = val
}
if val.Algo != nil {
algo = *val.Algo
}
if i.context.AddOnly {
info = &val
} else {
info, err = i.calcFile(name, algo)
}
} else {
// new file
if i.readonly {
info = &idxInfo{Algo: &algo}
} else {
info, err = i.calcFile(name, algo)
}
}
if err != nil {
i.logFilePanic(name, err.Error())
} else {
i.new[name] = *info
}
}
}
func (i *Index) showIgnoredOnly(ignore *Ignore) {
for _, name := range i.files {
if ignore.shouldIgnore(name) {
i.logFile(STATUS_IGNORE, name)
}
}
}
func (i *Index) checkFix(forceUpdateDmg bool) {
for name, b := range i.new {
if a, ok := i.cur[name]; !ok {
i.logFile(STATUS_NEW, name)
i.modified = true
} else {
amod := int64(a.ModTime)
bmod := int64(b.ModTime)
if a.Hash != nil && b.Hash != nil && *a.Hash == *b.Hash {
i.logFile(STATUS_OK, name)
if amod != bmod {
i.modified = true
}
continue
}
if amod == bmod {
i.logFile(STATUS_ERR_DMG, name)
if !forceUpdateDmg {
// keep DMG entry
i.new[name] = a
} else {
i.modified = true
}
} else if amod < bmod {
i.logFile(STATUS_UPDATE, name)
i.modified = true
} else if amod > bmod {
i.logFile(STATUS_UP_WARN_OLD, name)
i.modified = true
}
}
}
// track missing
for name := range i.cur {
if _, ok := i.new[name]; !ok {
i.modified = true
if i.context.ShowMissing {
i.logFile(STATUS_MISSING, name)
}
}
}
// dirs
m := make(map[string]bool)
for _, n := range i.newDirList {
m[n] = true
}
for _, name := range i.curDirList {
if !m[name] {
i.modified = true
if i.context.ShowMissing {
i.logDir(STATUS_MISSING, name+"/")
}
}
}
if len(i.newDirList) != len(i.curDirList) {
// added
i.modified = true
}
}
func (i *Index) calcFile(name string, a string) (*idxInfo, error) {
path := filepath.Join(i.path, name)
info, _ := os.Stat(path)
mtime := int64(info.ModTime().UnixNano() / 1e6)
h, err := Hashfile(path, a, i.context.perfMonBytes)
if err != nil {
return nil, err
}
i.context.perfMonFiles(1)
return &idxInfo{
ModTime: mtime,
Algo: &a,
Hash: &h,
}, nil
}
func (i *Index) save() (bool, error) {
if i.modified {
if i.readonly {
return false, errors.New("Error trying to save a readonly index.")
}
text, err := json.Marshal(i.new)
if err != nil {
return false, err
}
data := indexFile{
V: VERSION,
IdxRaw: text,
IdxHash: hashMd5(text),
}
if i.context.TrackDirectories {
data.Dir = i.newDirList
}
file, err := json.Marshal(data)
if err != nil {
return false, err
}
err = os.WriteFile(i.getIndexFilepath(), file, 0644)
if err != nil {
return false, err
}
i.modified = false
return true, nil
} else {
return false, nil
}
}
func (i *Index) load() error {
if _, err := os.Stat(i.getIndexFilepath()); err != nil {
if os.IsNotExist(err) {
return nil
}
return err
}
i.modified = false
file, err := os.ReadFile(i.getIndexFilepath())
if err != nil {
return err
}
var data indexFile
err = json.Unmarshal(file, &data)
if err != nil {
return err
}
if data.IdxRaw != nil {
err = json.Unmarshal(data.IdxRaw, &i.cur)
if err != nil {
return err
}
text := data.IdxRaw
if data.IdxHash != hashMd5(text) {
// old versions may have saved the JSON encoded with extra spaces
text, _ = json.Marshal(data.IdxRaw)
}
if data.IdxHash != hashMd5(text) {
i.modified = true
i.logFile(STATUS_ERR_IDX, i.getIndexFilepath())
}
} else {
var data1 indexFile1
json.Unmarshal(file, &data1)
if data1.Data != nil {
// convert from js to new format
for name, item := range data1.Data {
i.cur[name] = idxInfo{
ModTime: item.ModTime,
Algo: &algoMd5,
Hash: &item.Hash,
}
}
}
}
// dirs
if data.Dir != nil {
slices.Sort(data.Dir)
i.curDirList = data.Dir
}
return nil
}

BIN readme/chkbit-py.gif — new binary file, 531 KiB (not shown)

@@ -1,8 +0,0 @@
#!/bin/bash
set -eE -o pipefail
script_dir=$(dirname "$(realpath "$0")")
cd $script_dir/..
version=$(git describe --tags --always)
go build -ldflags="-X main.appVersion=$version" ./cmd/chkbit

@@ -1,13 +0,0 @@
#!/bin/bash
set -eE -o pipefail
script_dir=$(dirname "$(realpath "$0")")
cd $script_dir/..
res="$(gofmt -l . 2>&1)"
if [ -n "$res" ]; then
echo "gofmt check failed:"
echo "${res}"
exit 1
fi

@@ -1,7 +0,0 @@
#!/bin/bash
set -eE -o pipefail
script_dir=$(dirname "$(realpath "$0")")
cd $script_dir/..
go vet -structtag=false -composites=false ./...

@@ -1,342 +0,0 @@
package main
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"testing"
"time"
)
// perform integration test using the compiled binary
var testDir = "/tmp/chkbit"
func getCmd() string {
_, filename, _, _ := runtime.Caller(0)
prjRoot := filepath.Dir(filepath.Dir(filename))
return filepath.Join(prjRoot, "chkbit")
}
func checkOut(t *testing.T, sout string, expected string) {
if !strings.Contains(sout, expected) {
t.Errorf("Expected '%s' in output, got '%s'\n", expected, sout)
}
}
func checkNotOut(t *testing.T, sout string, notExpected string) {
if strings.Contains(sout, notExpected) {
t.Errorf("Did not expect '%s' in output, got '%s'\n", notExpected, sout)
}
}
// misc files
var (
startList = []string{"time", "year", "people", "way", "day", "thing"}
wordList = []string{"life", "world", "school", "state", "family", "student", "group", "country", "problem", "hand", "part", "place", "case", "week", "company", "system", "program", "work", "government", "number", "night", "point", "home", "water", "room", "mother", "area", "money", "story", "fact", "month", "lot", "right", "study", "book", "eye", "job", "word", "business", "issue", "side", "kind", "head", "house", "service", "friend", "father", "power", "hour", "game", "line", "end", "member", "law", "car", "city", "community", "name", "president", "team", "minute", "idea", "kid", "body", "information", "back", "face", "others", "level", "office", "door", "health", "person", "art", "war", "history", "party", "result", "change", "morning", "reason", "research", "moment", "air", "teacher", "force", "education"}
extList = []string{"txt", "md", "pdf", "jpg", "jpeg", "png", "mp4", "mp3", "csv"}
startDate = time.Date(2000, 1, 1, 0, 0, 0, 0, time.UTC)
endDate = time.Date(2024, 12, 1, 0, 0, 0, 0, time.UTC)
dateList = []time.Time{}
wordIdx = 0
extIdx = 0
dateIdx = 0
)
func nextWord() string {
word := wordList[wordIdx%len(wordList)]
wordIdx++
return word
}
func nextExt() string {
ext := extList[extIdx%len(extList)]
extIdx++
return ext
}
func setDate(filename string, r int) {
date := dateList[dateIdx%len(dateList)]
m := 17 * dateIdx / len(dateList)
date = date.Add(time.Duration(m) * time.Hour)
dateIdx++
os.Chtimes(filename, date, date)
}
func genFile(path string, size int) {
os.WriteFile(path, make([]byte, size), 0644)
setDate(path, size*size)
}
func genFiles(dir string, a int) {
os.MkdirAll(dir, 0755)
for i := 1; i <= 5; i++ {
size := a*i*wordIdx*100 + extIdx
file := nextWord() + "-" + nextWord()
if i%3 == 0 {
file += "-" + nextWord()
}
file += "." + nextExt()
genFile(filepath.Join(dir, file), size)
}
}
func genDir(root string) {
for _, start := range startList {
for i := 1; i <= 5; i++ {
dir := filepath.Join(root, start, nextWord())
genFiles(dir, 1)
if wordIdx%3 == 0 {
dir = filepath.Join(dir, nextWord())
genFiles(dir, 1)
}
}
}
}
func setupMiscFiles() {
var c int64 = 50
interval := (int64)(endDate.Sub(startDate).Seconds()) / c
for i := range make([]int64, c) {
dateList = append(dateList, startDate.Add(time.Duration(interval*(int64)(i))*time.Second))
}
root := filepath.Join(testDir, "root")
if err := os.RemoveAll(testDir); err != nil {
fmt.Println("Failed to clean", err)
panic(err)
}
genDir(root)
os.MkdirAll(filepath.Join(root, "day/car/empty"), 0755)
rootPeople := filepath.Join(root, "people")
testPeople := filepath.Join(testDir, "people")
err := os.Rename(rootPeople, testPeople)
if err != nil {
fmt.Println("Rename failed", err)
panic(err)
}
err = os.Symlink(testPeople, rootPeople)
if err != nil {
fmt.Println("Symlink failed", err)
panic(err)
}
}
func TestRoot(t *testing.T) {
setupMiscFiles()
tool := getCmd()
root := filepath.Join(testDir, "root")
// update index, no recurse
t.Run("no-recourse", func(t *testing.T) {
cmd := exec.Command(tool, "-umR", filepath.Join(root, "day/office"))
out, err := cmd.Output()
if err != nil {
t.Fatalf("failed with '%s'\n", err)
}
sout := string(out)
checkOut(t, sout, "Processed 5 files")
checkOut(t, sout, "- 1 directory was updated")
checkOut(t, sout, "- 5 file hashes were added")
checkOut(t, sout, "- 0 file hashes were updated")
checkNotOut(t, sout, "removed")
})
// update remaining index from root
t.Run("update-remaining", func(t *testing.T) {
cmd := exec.Command(tool, "-um", root)
out, err := cmd.Output()
if err != nil {
t.Fatalf("failed with '%s'\n", err)
}
sout := string(out)
checkOut(t, sout, "Processed 300 files")
checkOut(t, sout, "- 66 directories were updated")
checkOut(t, sout, "- 295 file hashes were added")
checkOut(t, sout, "- 0 file hashes were updated")
checkNotOut(t, sout, "removed")
})
// delete files, check for missing
t.Run("delete", func(t *testing.T) {
os.RemoveAll(filepath.Join(root, "thing/change"))
os.Remove(filepath.Join(root, "time/hour/minute/body-information.csv"))
cmd := exec.Command(tool, "-m", root)
out, err := cmd.Output()
if err != nil {
t.Fatalf("failed with '%s'\n", err)
}
sout := string(out)
checkOut(t, sout, "del /tmp/chkbit/root/thing/change/")
checkOut(t, sout, "2 files/directories would have been removed")
})
// do not report missing without -m
t.Run("no-missing", func(t *testing.T) {
cmd := exec.Command(tool, root)
out, err := cmd.Output()
if err != nil {
t.Fatalf("failed with '%s'\n", err)
}
sout := string(out)
checkNotOut(t, sout, "del ")
checkNotOut(t, sout, "removed")
})
// check for missing and update
t.Run("missing", func(t *testing.T) {
cmd := exec.Command(tool, "-um", root)
out, err := cmd.Output()
if err != nil {
t.Fatalf("failed with '%s'\n", err)
}
sout := string(out)
checkOut(t, sout, "del /tmp/chkbit/root/thing/change/")
checkOut(t, sout, "2 files/directories have been removed")
})
// check again
t.Run("repeat", func(t *testing.T) {
for i := 0; i < 10; i++ {
cmd := exec.Command(tool, "-uv", root)
out, err := cmd.Output()
if err != nil {
t.Fatalf("failed with '%s'\n", err)
}
sout := string(out)
checkOut(t, sout, "Processed 289 files")
checkNotOut(t, sout, "removed")
checkNotOut(t, sout, "updated")
checkNotOut(t, sout, "added")
}
})
// add files only
t.Run("add-only", func(t *testing.T) {
genFiles(filepath.Join(root, "way/add"), 99)
genFile(filepath.Join(root, "time/add-file.txt"), 500)
// modify existing, will not be reported:
genFile(filepath.Join(root, "way/job/word-business.mp3"), 500)
cmd := exec.Command(tool, "-a", root)
out, err := cmd.Output()
if err != nil {
t.Fatalf("failed with '%s'\n", err)
}
sout := string(out)
checkOut(t, sout, "Processed 6 files")
checkOut(t, sout, "- 3 directories were updated")
checkOut(t, sout, "- 6 file hashes were added")
checkOut(t, sout, "- 0 file hashes were updated")
})
// update remaining
t.Run("update-remaining-add", func(t *testing.T) {
cmd := exec.Command(tool, "-u", root)
out, err := cmd.Output()
if err != nil {
t.Fatalf("failed with '%s'\n", err)
}
sout := string(out)
checkOut(t, sout, "Processed 295 files")
checkOut(t, sout, "- 1 directory was updated")
checkOut(t, sout, "- 0 file hashes were added")
checkOut(t, sout, "- 1 file hash was updated")
})
}
func TestDMG(t *testing.T) {
testDmg := filepath.Join(testDir, "test_dmg")
if err := os.RemoveAll(testDmg); err != nil {
fmt.Println("Failed to clean", err)
panic(err)
}
if err := os.MkdirAll(testDmg, 0755); err != nil {
fmt.Println("Failed to create test directory", err)
panic(err)
}
if err := os.Chdir(testDmg); err != nil {
fmt.Println("Failed to cd test directory", err)
panic(err)
}
tool := getCmd()
testFile := filepath.Join(testDmg, "test.txt")
t1, _ := time.Parse(time.RFC3339, "2022-02-01T11:00:00Z")
t2, _ := time.Parse(time.RFC3339, "2022-02-01T12:00:00Z")
t3, _ := time.Parse(time.RFC3339, "2022-02-01T13:00:00Z")
// create test and set the modified time"
t.Run("create", func(t *testing.T) {
os.WriteFile(testFile, []byte("foo1"), 0644)
os.Chtimes(testFile, t2, t2)
cmd := exec.Command(tool, "-u", ".")
if out, err := cmd.Output(); err != nil {
t.Fatalf("failed with '%s'\n", err)
} else {
checkOut(t, string(out), "new test.txt")
}
})
// update test with different content & old modified time (expect 'old')
t.Run("expect-old", func(t *testing.T) {
os.WriteFile(testFile, []byte("foo2"), 0644)
os.Chtimes(testFile, t1, t1)
cmd := exec.Command(tool, "-u", ".")
if out, err := cmd.Output(); err != nil {
t.Fatalf("failed with '%s'\n", err)
} else {
checkOut(t, string(out), "old test.txt")
}
})
// update test with new modified time (expect 'upd')
t.Run("expect-upd", func(t *testing.T) {
os.WriteFile(testFile, []byte("foo3"), 0644)
os.Chtimes(testFile, t3, t3)
cmd := exec.Command(tool, "-u", ".")
if out, err := cmd.Output(); err != nil {
t.Fatalf("failed with '%s'\n", err)
} else {
checkOut(t, string(out), "upd test.txt")
}
})
// update test with the same modified time to simulate damage (expect DMG)
t.Run("expect-DMG", func(t *testing.T) {
os.WriteFile(testFile, []byte("foo4"), 0644)
os.Chtimes(testFile, t3, t3)
cmd := exec.Command(tool, "-u", ".")
if out, err := cmd.Output(); err != nil {
if cmd.ProcessState.ExitCode() != 1 {
t.Fatalf("expected to fail with exit code 1 vs %d!", cmd.ProcessState.ExitCode())
}
checkOut(t, string(out), "DMG test.txt")
} else {
t.Fatal("expected to fail!")
}
})
}

@@ -1,11 +0,0 @@
#!/bin/bash
set -e
script_dir=$(dirname "$(realpath "$0")")
cd $script_dir/..
# prep
$script_dir/build
go test -v ./cmd/chkbit/util -count=1
go test -v ./scripts -count=1

@@ -1,55 +0,0 @@
#!/bin/bash
set -eE -o pipefail
script_dir=$(dirname "$(realpath "$0")")
cd $script_dir/..
if [ -z "$version" ]; then
version=$(git rev-parse HEAD)
fi
echo "building version $version"
mkdir -p dist
rm -f dist/*
build() {
echo "- $1-$2"
rm -f dist/chkbit
CGO_ENABLED=0 GOOS="$1" GOARCH="$2" go build -o dist -ldflags="-X main.appVersion=$version" ./cmd/chkbit
pushd dist
case "$1" in
windows)
outfile="chkbit-$1-$2.zip"
zip "$outfile" chkbit.exe --move
;;
*)
outfile="chkbit-$1-$2.tar.gz"
tar -czf "$outfile" chkbit --remove-files
;;
esac
popd
}
if [[ -z $2 ]]; then
build android arm64
build darwin amd64
build darwin arm64
build freebsd amd64
build freebsd arm64
build freebsd riscv64
build linux amd64
build linux arm64
build linux riscv64
build netbsd amd64
build netbsd arm64
build openbsd amd64
build openbsd arm64
build windows amd64
build windows arm64
else
build $1 $2
fi

@@ -1,38 +0,0 @@
package chkbit
type Status string
const (
STATUS_PANIC Status = "EXC"
STATUS_ERR_IDX Status = "EIX"
STATUS_ERR_DMG Status = "DMG"
STATUS_UPDATE_INDEX Status = "iup"
STATUS_UP_WARN_OLD Status = "old"
STATUS_UPDATE Status = "upd"
STATUS_NEW Status = "new"
STATUS_OK Status = "ok "
STATUS_IGNORE Status = "ign"
STATUS_MISSING Status = "del"
)
func (s Status) String() string {
return (string)(s)
}
func (s Status) IsErrorOrWarning() bool {
return s == STATUS_PANIC || s == STATUS_ERR_DMG || s == STATUS_ERR_IDX || s == STATUS_UP_WARN_OLD
}
func (s Status) IsVerbose() bool {
return s == STATUS_OK || s == STATUS_IGNORE
}
type LogEvent struct {
Stat Status
Message string
}
type PerfEvent struct {
NumFiles int64
NumBytes int64
}

@@ -1,38 +0,0 @@
package chkbit
type WorkItem struct {
path string
filesToIndex []string
dirList []string
ignore *Ignore
}
func (context *Context) runWorker(id int) {
for {
item := <-context.WorkQueue
if item == nil {
break
}
index := newIndex(context, item.path, item.filesToIndex, item.dirList, !context.UpdateIndex)
err := index.load()
if err != nil {
context.log(STATUS_PANIC, index.getIndexFilepath()+": "+err.Error())
}
if context.ShowIgnoredOnly {
index.showIgnoredOnly(item.ignore)
} else {
index.calcHashes(item.ignore)
index.checkFix(context.ForceUpdateDmg)
if context.UpdateIndex {
if changed, err := index.save(); err != nil {
context.logErr(item.path, err)
} else if changed {
context.log(STATUS_UPDATE_INDEX, "")
}
}
}
}
}