chkbit/index.go

package chkbit

import (
    "encoding/json"
    "errors"
    "os"
    "path/filepath"
    "reflect"
    "slices"
)

const VERSION = 2 // index version

var (
    algoMd5 = "md5"
)
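
// idxInfo holds the recorded state of a single file: its modification time,
// the hash algorithm used, the resulting hash, and, for entries written by
// older index versions, the legacy md5 field.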
type idxInfo struct {
    ModTime    int64   `json:"mod"`
    Algo       *string `json:"a,omitempty"`
    Hash       *string `json:"h,omitempty"`
    LegacyHash *string `json:"md5,omitempty"`
}
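
// indexFile is the on-disk layout of a version 2 index. The file map is kept
// as raw JSON in IdxRaw so that IdxHash can be verified against the exact
// bytes that were stored.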
type indexFile struct {
    V int `json:"v"`
    // IdxRaw -> map[string]idxInfo
    IdxRaw  json.RawMessage `json:"idx"`
    IdxHash string          `json:"idx_hash"`
    // 2024-08 optional, list of subdirectories
    Dir []string `json:"dirlist,omitempty"`
}
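
// idxInfo1 is a file entry in the legacy version 1 index format (md5 only).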
type idxInfo1 struct {
    ModTime int64  `json:"mod"`
    Hash    string `json:"md5"`
}
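
// indexFile1 is the on-disk layout of the legacy version 1 index.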
type indexFile1 struct {
    Data map[string]idxInfo1 `json:"data"`
}
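
// Index manages the index of a single directory: cur holds the entries loaded
// from disk, new holds the entries computed during the current run, and
// modified tracks whether the index needs to be written back.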
type Index struct {
    context    *Context
    path       string
    files      []string
    cur        map[string]idxInfo
    new        map[string]idxInfo
    curDirList []string
    newDirList []string
    modified   bool
    readonly   bool
}
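
// newIndex creates an Index for one directory with the given file and
// subdirectory lists; a readonly index is never written back to disk.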
func newIndex(context *Context, path string, files []string, dirList []string, readonly bool) *Index {
    slices.Sort(dirList)
    return &Index{
        context:    context,
        path:       path,
        files:      files,
        cur:        make(map[string]idxInfo),
        new:        make(map[string]idxInfo),
        curDirList: make([]string, 0),
        newDirList: dirList,
        readonly:   readonly,
    }
}

func (i *Index) getIndexFilepath() string {
    return filepath.Join(i.path, i.context.IndexFilename)
}

func (i *Index) setMod(value bool) {
    i.modified = value
}

func (i *Index) logFilePanic(name string, message string) {
    i.context.log(STATUS_PANIC, filepath.Join(i.path, name)+": "+message)
}

func (i *Index) logFile(stat Status, name string) {
    i.context.log(stat, filepath.Join(i.path, name))
}

func (i *Index) logDir(stat Status, name string) {
    i.context.log(stat, filepath.Join(i.path, name)+"/")
}
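
// calcHashes computes the hash for every file in the directory, skipping
// ignored names. Entries already in the index keep their original algorithm
// (legacy md5 entries are converted in place); in readonly mode, files that
// are not yet in the index are recorded without being hashed.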
func (i *Index) calcHashes(ignore *Ignore) {
    for _, name := range i.files {
        if ignore != nil && ignore.shouldIgnore(name) {
            i.logFile(STATUS_IGNORE, name)
            continue
        }
        var err error
        var info *idxInfo
        algo := i.context.HashAlgo
        if val, ok := i.cur[name]; ok {
            // existing
            if val.LegacyHash != nil {
                // convert from py1 to new format
                val = idxInfo{
                    ModTime: val.ModTime,
                    Algo:    &algoMd5,
                    Hash:    val.LegacyHash,
                }
                i.cur[name] = val
            }
            if val.Algo != nil {
                algo = *val.Algo
            }
            info, err = i.calcFile(name, algo)
        } else {
            if i.readonly {
                info = &idxInfo{Algo: &algo}
            } else {
                info, err = i.calcFile(name, algo)
            }
        }
        if err != nil {
            i.logFilePanic(name, err.Error())
        } else {
            i.new[name] = *info
        }
    }
}
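
// showIgnoredOnly only reports which files would be ignored; nothing is hashed.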
func (i *Index) showIgnoredOnly(ignore *Ignore) {
    for _, name := range i.files {
        if ignore.shouldIgnore(name) {
            i.logFile(STATUS_IGNORE, name)
        }
    }
}
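
// checkFix compares the newly computed entries against the stored index and
// logs files as new, updated, damaged (same modification time but different
// hash), or outdated. Damaged entries keep their stored value unless
// forceUpdateDmg is set. When ShowMissing is enabled, files and tracked
// subdirectories that have disappeared are reported as missing.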
func (i *Index) checkFix(forceUpdateDmg bool) {
    for name, b := range i.new {
        if a, ok := i.cur[name]; !ok {
            i.logFile(STATUS_NEW, name)
            i.setMod(true)
        } else {
            amod := int64(a.ModTime)
            bmod := int64(b.ModTime)
            if a.Hash != nil && b.Hash != nil && *a.Hash == *b.Hash {
                i.logFile(STATUS_OK, name)
                if amod != bmod {
                    i.setMod(true)
                }
                continue
            }
            if amod == bmod {
                i.logFile(STATUS_ERR_DMG, name)
                if !forceUpdateDmg {
                    // keep DMG entry
                    i.new[name] = a
                } else {
                    i.setMod(true)
                }
            } else if amod < bmod {
                i.logFile(STATUS_UPDATE, name)
                i.setMod(true)
            } else if amod > bmod {
                i.logFile(STATUS_UP_WARN_OLD, name)
                i.setMod(true)
            }
        }
    }
    if i.context.ShowMissing {
        for name := range i.cur {
            if _, ok := i.new[name]; !ok {
                i.logFile(STATUS_MISSING, name)
                i.setMod(true)
            }
        }
        // dirs
        m := make(map[string]bool)
        for _, n := range i.newDirList {
            m[n] = true
        }
        for _, name := range i.curDirList {
            if !m[name] {
                i.logDir(STATUS_MISSING, name+"/")
                i.setMod(true)
            }
        }
    }
}
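
// calcFile hashes a single file with the given algorithm and returns an index
// entry with the file's modification time in milliseconds.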
func (i *Index) calcFile(name string, a string) (*idxInfo, error) {
    path := filepath.Join(i.path, name)
    info, err := os.Stat(path)
    if err != nil {
        return nil, err
    }
    mtime := int64(info.ModTime().UnixNano() / 1e6)
    h, err := Hashfile(path, a, i.context.perfMonBytes)
    if err != nil {
        return nil, err
    }
    i.context.perfMonFiles(1)
    return &idxInfo{
        ModTime: mtime,
        Algo:    &a,
        Hash:    &h,
    }, nil
}
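
// save writes the index back to disk if it was modified; it reports whether a
// new index file was written.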
func (i *Index) save() (bool, error) {
    if i.modified {
        if i.readonly {
            return false, errors.New("error trying to save a readonly index")
        }
        text, err := json.Marshal(i.new)
        if err != nil {
            return false, err
        }
        data := indexFile{
            V:       VERSION,
            IdxRaw:  text,
            IdxHash: hashMd5(text),
        }
        if i.context.TrackDirectories {
            data.Dir = i.newDirList
        }
        file, err := json.Marshal(data)
        if err != nil {
            return false, err
        }
        err = os.WriteFile(i.getIndexFilepath(), file, 0644)
        if err != nil {
            return false, err
        }
        i.setMod(false)
        return true, nil
    } else {
        return false, nil
    }
}
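
// load reads the index file if it exists, verifies the embedded hash of the
// file map, and converts legacy version 1 entries to the current format.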
func (i *Index) load() error {
    if _, err := os.Stat(i.getIndexFilepath()); err != nil {
        if os.IsNotExist(err) {
            // todo
            i.setMod(true)
            return nil
        }
        return err
    }
    i.setMod(false)
    file, err := os.ReadFile(i.getIndexFilepath())
    if err != nil {
        return err
    }
    var data indexFile
    err = json.Unmarshal(file, &data)
    if err != nil {
        return err
    }
    if data.IdxRaw != nil {
        err = json.Unmarshal(data.IdxRaw, &i.cur)
        if err != nil {
            return err
        }
        text := data.IdxRaw
        if data.IdxHash != hashMd5(text) {
            // old versions may have saved the JSON encoded with extra spaces
            text, _ = json.Marshal(data.IdxRaw)
        }
        if data.IdxHash != hashMd5(text) {
            i.setMod(true)
            i.logFile(STATUS_ERR_IDX, i.getIndexFilepath())
        }
    } else {
        var data1 indexFile1
        json.Unmarshal(file, &data1)
        if data1.Data != nil {
            // convert from js to new format
            for name, item := range data1.Data {
                i.cur[name] = idxInfo{
                    ModTime: item.ModTime,
                    Algo:    &algoMd5,
                    Hash:    &item.Hash,
                }
            }
        }
    }
    if data.Dir != nil {
        slices.Sort(data.Dir)
        i.curDirList = data.Dir
        if i.context.TrackDirectories && !reflect.DeepEqual(i.curDirList, i.newDirList) {
            i.setMod(true)
        }
    }
    return nil
}