2018-08-27 19:53:10 +00:00
|
|
|
package buffer
|
2016-03-17 21:27:57 +00:00
|
|
|
|
|
|
|
|
import (
|
2020-02-09 20:36:31 +00:00
|
|
|
"bufio"
|
2019-01-15 03:29:24 +00:00
|
|
|
"bytes"
|
2017-09-18 03:33:18 +00:00
|
|
|
"crypto/md5"
|
2018-01-17 02:54:13 +00:00
|
|
|
"errors"
|
2020-02-27 17:39:19 +00:00
|
|
|
"fmt"
|
2016-11-29 18:44:30 +00:00
|
|
|
"io"
|
2024-05-29 18:33:46 +00:00
|
|
|
"io/fs"
|
2016-05-28 21:29:49 +00:00
|
|
|
"os"
|
|
|
|
|
"path/filepath"
|
2019-06-15 22:22:36 +00:00
|
|
|
"strconv"
|
2016-08-30 15:28:28 +00:00
|
|
|
"strings"
|
2020-01-29 01:54:14 +00:00
|
|
|
"sync"
|
2020-09-04 17:36:23 +00:00
|
|
|
"sync/atomic"
|
2016-05-29 21:58:06 +00:00
|
|
|
"time"
|
2016-12-13 13:58:08 +00:00
|
|
|
|
2019-03-19 22:28:51 +00:00
|
|
|
luar "layeh.com/gopher-luar"
|
|
|
|
|
|
2020-02-10 19:49:08 +00:00
|
|
|
dmp "github.com/sergi/go-diff/diffmatchpatch"
|
2020-05-04 14:16:15 +00:00
|
|
|
"github.com/zyedidia/micro/v2/internal/config"
|
|
|
|
|
ulua "github.com/zyedidia/micro/v2/internal/lua"
|
|
|
|
|
"github.com/zyedidia/micro/v2/internal/screen"
|
|
|
|
|
"github.com/zyedidia/micro/v2/internal/util"
|
|
|
|
|
"github.com/zyedidia/micro/v2/pkg/highlight"
|
2025-02-06 18:54:47 +00:00
|
|
|
"golang.org/x/text/encoding"
|
2019-01-24 00:06:20 +00:00
|
|
|
"golang.org/x/text/encoding/htmlindex"
|
|
|
|
|
"golang.org/x/text/encoding/unicode"
|
|
|
|
|
"golang.org/x/text/transform"
|
2016-03-17 21:27:57 +00:00
|
|
|
)
|
|
|
|
|
|
2019-01-24 00:06:20 +00:00
|
|
|
var (
	// OpenBuffers is a list of the currently open buffers
	OpenBuffers []*Buffer
	// LogBuf is a reference to the log buffer which can be opened with the
	// `> log` command
	LogBuf *Buffer
)
|
|
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
// The BufType defines what kind of buffer this is
type BufType struct {
	Kind     int  // numeric identifier distinguishing the buffer kinds (see BTDefault, BTHelp, ...)
	Readonly bool // The buffer cannot be edited
	Scratch  bool // The buffer cannot be saved
	Syntax   bool // Syntax highlighting is enabled
}
|
|
|
|
|
|
2017-08-24 17:13:14 +00:00
|
|
|
var (
	// BTDefault is a default buffer
	BTDefault = BufType{0, false, false, true}
	// BTHelp is a help buffer
	BTHelp = BufType{1, true, true, true}
	// BTLog is a log buffer
	BTLog = BufType{2, true, true, false}
	// BTScratch is a buffer that cannot be saved (for scratch work)
	BTScratch = BufType{3, false, true, false}
	// BTRaw is a buffer that shows raw terminal events
	BTRaw = BufType{4, false, true, false}
	// BTInfo is a buffer for inputting information
	BTInfo = BufType{5, false, true, false}
	// BTStdout is a buffer that only writes to stdout
	// when closed
	BTStdout = BufType{6, false, true, true}
)
|
|
|
|
|
|
2020-01-21 04:43:33 +00:00
|
|
|
// SharedBuffer is a struct containing info that is shared among buffers
// that have the same file open
type SharedBuffer struct {
	*LineArray
	// Stores the last modification time of the file the buffer is pointing to
	ModTime time.Time
	// Type of the buffer (e.g. help, raw, scratch etc..)
	Type BufType

	// Path to the file on disk
	Path string
	// Absolute path to the file on disk
	AbsPath string
	// Name of the buffer on the status line
	name string

	// toStdout marks this buffer's contents for writing to stdout on close
	// (used by the BTStdout buffer type)
	toStdout bool

	// Settings customized by the user
	Settings map[string]interface{}
	// LocalSettings customized by the user for this buffer only
	LocalSettings map[string]bool

	// encoding used to decode the file contents when reading
	// (resolved from the "encoding" setting; falls back to UTF-8)
	encoding encoding.Encoding

	// Autocompletion state shared across views of this file.
	Suggestions   []string
	Completions   []string
	CurSuggestion int

	// Messages attached to this buffer (e.g. linter or plugin messages)
	Messages []*Message

	// Diff (gutter) state against diffBase; guarded by diffLock.
	// NOTE(review): diff presumably maps line numbers to DiffStatus — confirm.
	updateDiffTimer   *time.Timer
	diffBase          []byte
	diffBaseLineCount int
	diffLock          sync.RWMutex
	diff              map[int]DiffStatus

	// RequestedBackup indicates that a backup write has been requested
	RequestedBackup bool
	// forceKeepBackup keeps the backup file even when it would normally be removed
	forceKeepBackup bool

	// ReloadDisabled allows the user to disable reloads if they
	// are viewing a file that is constantly changing
	ReloadDisabled bool

	// isModified tracks the dirty flag when "fastdirty" is enabled
	isModified bool
	// Whether or not suggestions can be autocompleted must be shared because
	// it changes based on how the buffer has changed
	HasSuggestions bool

	// The Highlighter struct actually performs the highlighting
	Highlighter *highlight.Highlighter
	// SyntaxDef represents the syntax highlighting definition being used
	// This stores the highlighting rules and filetype detection info
	SyntaxDef *highlight.Def

	// ModifiedThisFrame is set by MarkModified and consumed by the display layer
	ModifiedThisFrame bool

	// Hash of the original buffer -- empty if fastdirty is on
	origHash [md5.Size]byte
}
|
|
|
|
|
|
|
|
|
|
func (b *SharedBuffer) insert(pos Loc, value []byte) {
|
|
|
|
|
b.isModified = true
|
2019-01-24 23:09:57 +00:00
|
|
|
b.HasSuggestions = false
|
2019-01-14 21:52:25 +00:00
|
|
|
b.LineArray.insert(pos, value)
|
2020-02-02 19:20:39 +00:00
|
|
|
|
2020-02-19 19:41:30 +00:00
|
|
|
inslines := bytes.Count(value, []byte{'\n'})
|
|
|
|
|
b.MarkModified(pos.Y, pos.Y+inslines)
|
2019-01-14 21:52:25 +00:00
|
|
|
}
|
|
|
|
|
func (b *SharedBuffer) remove(start, end Loc) []byte {
|
|
|
|
|
b.isModified = true
|
2019-01-24 23:09:57 +00:00
|
|
|
b.HasSuggestions = false
|
2020-02-19 19:41:30 +00:00
|
|
|
defer b.MarkModified(start.Y, end.Y)
|
2019-01-14 21:52:25 +00:00
|
|
|
return b.LineArray.remove(start, end)
|
|
|
|
|
}
|
|
|
|
|
|
2020-02-19 19:41:30 +00:00
|
|
|
// MarkModified marks the buffer as modified for this frame
|
|
|
|
|
// and performs rehighlighting if syntax highlighting is enabled
|
|
|
|
|
func (b *SharedBuffer) MarkModified(start, end int) {
|
|
|
|
|
b.ModifiedThisFrame = true
|
|
|
|
|
|
2020-05-17 20:05:34 +00:00
|
|
|
start = util.Clamp(start, 0, len(b.lines)-1)
|
|
|
|
|
end = util.Clamp(end, 0, len(b.lines)-1)
|
2020-02-19 19:41:30 +00:00
|
|
|
|
2021-09-28 20:39:03 +00:00
|
|
|
if b.Settings["syntax"].(bool) && b.SyntaxDef != nil {
|
|
|
|
|
l := -1
|
|
|
|
|
for i := start; i <= end; i++ {
|
|
|
|
|
l = util.Max(b.Highlighter.ReHighlightStates(b, i), l)
|
|
|
|
|
}
|
|
|
|
|
b.Highlighter.HighlightMatches(b, start, l)
|
|
|
|
|
}
|
|
|
|
|
|
2020-02-19 19:41:30 +00:00
|
|
|
for i := start; i <= end; i++ {
|
2021-09-28 20:39:03 +00:00
|
|
|
b.LineArray.invalidateSearchMatches(i)
|
2020-02-19 19:41:30 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-02-12 18:56:00 +00:00
|
|
|
// DisableReload disables future reloads of this sharedbuffer
|
|
|
|
|
func (b *SharedBuffer) DisableReload() {
|
|
|
|
|
b.ReloadDisabled = true
|
|
|
|
|
}
|
|
|
|
|
|
2020-02-08 07:56:24 +00:00
|
|
|
const (
	// DSUnchanged means the line is unchanged relative to the diff base
	DSUnchanged = 0
	// DSAdded means the line was added relative to the diff base
	DSAdded = 1
	// DSModified means the line was modified relative to the diff base
	DSModified = 2
	// DSDeletedAbove means one or more lines were deleted above this line
	DSDeletedAbove = 3
)

// DiffStatus is the per-line diff state stored in SharedBuffer.diff
type DiffStatus byte
|
|
|
|
|
|
2018-12-31 19:46:04 +00:00
|
|
|
// Buffer stores the main information about a currently open file including
// the actual text (in a LineArray), the undo/redo stack (in an EventHandler)
// all the cursors, the syntax highlighting info, the settings for the buffer
// and some misc info about modification time and path location.
// The syntax highlighting info must be stored with the buffer because the syntax
// highlighter attaches information to each line of the buffer for optimization
// purposes so it doesn't have to rehighlight everything on every update.
// Likewise for the search highlighting.
type Buffer struct {
	*EventHandler
	*SharedBuffer

	// fini is set (atomically) when Fini has run, marking the buffer as closed
	fini int32

	// cursors holds all cursors in this view; curCursor indexes the active one
	cursors   []*Cursor
	curCursor int
	// StartCursor is the location where the cursor is placed when the buffer opens
	StartCursor Loc

	// OptionCallback is called after a buffer option value is changed.
	// The display module registers its OptionCallback to ensure the buffer window
	// is properly updated when needed. This is a workaround for the fact that
	// the buffer module cannot directly call the display's API (it would mean
	// a circular dependency between packages).
	OptionCallback func(option string, nativeValue interface{})

	// The display module registers its own GetVisualX function for getting
	// the correct visual x location of a cursor when softwrap is used.
	// This is hacky. Maybe it would be better to move all the visual x logic
	// from buffer to display, but it would require rewriting a lot of code.
	GetVisualX func(loc Loc) int

	// Last search stores the last successful search
	LastSearch      string
	LastSearchRegex bool
	// HighlightSearch enables highlighting all instances of the last successful search
	HighlightSearch bool

	// OverwriteMode indicates that we are in overwrite mode (toggled by
	// Insert key by default) i.e. that typing a character shall replace the
	// character under the cursor instead of inserting a character before it.
	OverwriteMode bool
}
|
|
|
|
|
|
2020-05-30 02:48:23 +00:00
|
|
|
// NewBufferFromFileAtLoc opens a new buffer with a given cursor location
// If cursorLoc is {-1, -1} the location does not overwrite what the cursor location
// would otherwise be (start of file, or saved cursor position if `savecursor` is
// enabled)
func NewBufferFromFileAtLoc(path string, btype BufType, cursorLoc Loc) (*Buffer, error) {
	var err error
	filename := path
	// Optionally parse a trailing :LINE:COL from the filename.
	if config.GetGlobalOption("parsecursor").(bool) && cursorLoc.X == -1 && cursorLoc.Y == -1 {
		var cursorPos []string
		filename, cursorPos = util.GetPathAndCursorPosition(filename)
		cursorLoc, err = ParseCursorLocation(cursorPos)
		if err != nil {
			// Unparseable position: fall back to the default location.
			cursorLoc = Loc{-1, -1}
		}
	}

	filename, err = util.ReplaceHome(filename)
	if err != nil {
		return nil, err
	}

	// A missing file is fine (an empty buffer is created below);
	// any other stat error is fatal.
	fileInfo, serr := os.Stat(filename)
	if serr != nil && !errors.Is(serr, fs.ErrNotExist) {
		return nil, serr
	}
	if serr == nil && fileInfo.IsDir() {
		return nil, errors.New("Error: " + filename + " is a directory and cannot be opened")
	}
	if serr == nil && !fileInfo.Mode().IsRegular() {
		return nil, errors.New("Error: " + filename + " is not a regular file and cannot be opened")
	}

	// Probe for write permission by attempting a write-only open.
	f, err := os.OpenFile(filename, os.O_WRONLY, 0)
	readonly := errors.Is(err, fs.ErrPermission)
	f.Close()

	file, err := os.Open(filename)
	if err == nil {
		defer file.Close()
	}

	var buf *Buffer
	if errors.Is(err, fs.ErrNotExist) {
		// File does not exist -- create an empty buffer with that name
		buf = NewBufferFromString("", filename, btype)
	} else if err != nil {
		return nil, err
	} else {
		buf = NewBuffer(file, util.FSize(file), filename, cursorLoc, btype)
		if buf == nil {
			return nil, errors.New("could not open file")
		}
	}

	if readonly && prompt != nil {
		prompt.Message(fmt.Sprintf("Warning: file is readonly - %s will be attempted when saving", config.GlobalSettings["sucmd"].(string)))
		// buf.SetOptionNative("readonly", true)
	}

	return buf, nil
}
|
|
|
|
|
|
2020-05-30 02:48:23 +00:00
|
|
|
// NewBufferFromFile opens a new buffer using the given path
|
|
|
|
|
// It will also automatically handle `~`, and line/column with filename:l:c
|
|
|
|
|
// It will return an empty buffer if the path does not exist
|
|
|
|
|
// and an error if the file is a directory
|
|
|
|
|
func NewBufferFromFile(path string, btype BufType) (*Buffer, error) {
|
|
|
|
|
return NewBufferFromFileAtLoc(path, btype, Loc{-1, -1})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// NewBufferFromStringAtLoc creates a new buffer containing the given string with a cursor loc
|
|
|
|
|
func NewBufferFromStringAtLoc(text, path string, btype BufType, cursorLoc Loc) *Buffer {
|
|
|
|
|
return NewBuffer(strings.NewReader(text), int64(len(text)), path, cursorLoc, btype)
|
|
|
|
|
}
|
|
|
|
|
|
2018-06-03 21:13:03 +00:00
|
|
|
// NewBufferFromString creates a new buffer containing the given string
|
2019-01-01 03:07:01 +00:00
|
|
|
func NewBufferFromString(text, path string, btype BufType) *Buffer {
|
2019-06-15 22:22:36 +00:00
|
|
|
return NewBuffer(strings.NewReader(text), int64(len(text)), path, Loc{-1, -1}, btype)
|
2016-11-29 18:44:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// NewBuffer creates a new buffer from a given reader with a given path
// Ensure that ReadSettings and InitGlobalSettings have been called before creating
// a new buffer
// Places the cursor at startcursor. If startcursor is -1, -1 places the
// cursor at an autodetected location (based on savecursor or :LINE:COL)
func NewBuffer(r io.Reader, size int64, path string, startcursor Loc, btype BufType) *Buffer {
	absPath, err := filepath.Abs(path)
	if err != nil {
		// Fall back to the raw path if it cannot be made absolute.
		absPath = path
	}

	b := new(Buffer)

	// If the same file is already open in another buffer, share its
	// underlying SharedBuffer and EventHandler instead of re-reading it.
	found := false
	if len(path) > 0 {
		for _, buf := range OpenBuffers {
			if buf.AbsPath == absPath && buf.Type != BTInfo {
				found = true
				b.SharedBuffer = buf.SharedBuffer
				b.EventHandler = buf.EventHandler
			}
		}
	}

	hasBackup := false
	if !found {
		b.SharedBuffer = new(SharedBuffer)
		b.Type = btype
		b.AbsPath = absPath
		b.Path = path

		// Start from the default settings, then layer on all non-global-only
		// global settings and path-glob-specific overrides.
		b.Settings = config.DefaultCommonSettings()
		b.LocalSettings = make(map[string]bool)
		for k, v := range config.GlobalSettings {
			if _, ok := config.DefaultGlobalOnlySettings[k]; !ok {
				// make sure setting is not global-only
				b.Settings[k] = v
			}
		}
		config.UpdatePathGlobLocals(b.Settings, absPath)

		// Resolve the configured encoding; fall back to UTF-8 if unknown.
		b.encoding, err = htmlindex.Get(b.Settings["encoding"].(string))
		if err != nil {
			b.encoding = unicode.UTF8
			b.Settings["encoding"] = "utf-8"
		}

		// Try to restore unsaved changes from a backup file, if one exists.
		var ok bool
		hasBackup, ok = b.ApplyBackup(size)

		if !ok {
			// Backup application failed; hand back an empty buffer instead.
			return NewBufferFromString("", "", btype)
		}
		if !hasBackup {
			reader := bufio.NewReader(transform.NewReader(r, b.encoding.NewDecoder()))

			var ff FileFormat = FFAuto

			if size == 0 {
				// for empty files, use the fileformat setting instead of
				// autodetection
				switch b.Settings["fileformat"] {
				case "unix":
					ff = FFUnix
				case "dos":
					ff = FFDos
				}
			} else {
				// in case of autodetection treat as locally set
				b.LocalSettings["fileformat"] = true
			}

			b.LineArray = NewLineArray(uint64(size), ff, reader)
		}
		b.EventHandler = NewEventHandler(b.SharedBuffer, b.cursors)

		// The last time this file was modified
		b.UpdateModTime()
	}

	if b.Settings["readonly"].(bool) && b.Type == BTDefault {
		b.Type.Readonly = true
	}

	// Reflect the detected line endings back into the fileformat setting.
	switch b.Endings {
	case FFUnix:
		b.Settings["fileformat"] = "unix"
	case FFDos:
		b.Settings["fileformat"] = "dos"
	}

	b.UpdateRules()
	// we know the filetype now, so update per-filetype settings
	config.UpdateFileTypeLocals(b.Settings, b.Settings["filetype"].(string))

	// Make sure the directory for serialized buffer state exists.
	if _, err := os.Stat(filepath.Join(config.ConfigDir, "buffers")); errors.Is(err, fs.ErrNotExist) {
		os.Mkdir(filepath.Join(config.ConfigDir, "buffers"), os.ModePerm)
	}

	if startcursor.X != -1 && startcursor.Y != -1 {
		b.StartCursor = startcursor
	} else if b.Settings["savecursor"].(bool) || b.Settings["saveundo"].(bool) {
		// Restore the saved cursor position and/or undo history.
		err := b.Unserialize()
		if err != nil {
			screen.TermMessage(err)
		}
	}

	b.AddCursor(NewCursor(b, b.StartCursor))
	b.GetActiveCursor().Relocate()

	if !b.Settings["fastdirty"].(bool) && !found {
		if size > LargeFileThreshold {
			// If the file is larger than LargeFileThreshold fastdirty needs to be on
			b.Settings["fastdirty"] = true
		} else if !hasBackup {
			// since applying a backup does not save the applied backup to disk, we should
			// not calculate the original hash based on the backup data
			calcHash(b, &b.origHash)
		}
	}

	err = config.RunPluginFn("onBufferOpen", luar.New(ulua.L, b))
	if err != nil {
		screen.TermMessage(err)
	}

	OpenBuffers = append(OpenBuffers, b)

	return b
}
|
|
|
|
|
|
2023-08-31 11:53:33 +00:00
|
|
|
// CloseOpenBuffers removes all open buffers
|
|
|
|
|
func CloseOpenBuffers() {
|
|
|
|
|
for i, buf := range OpenBuffers {
|
|
|
|
|
buf.Fini()
|
|
|
|
|
OpenBuffers[i] = nil
|
|
|
|
|
}
|
|
|
|
|
OpenBuffers = OpenBuffers[:0]
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-14 02:06:58 +00:00
|
|
|
// Close removes this buffer from the list of open buffers
|
|
|
|
|
func (b *Buffer) Close() {
|
|
|
|
|
for i, buf := range OpenBuffers {
|
|
|
|
|
if b == buf {
|
2019-06-15 19:50:37 +00:00
|
|
|
b.Fini()
|
2019-01-14 02:06:58 +00:00
|
|
|
copy(OpenBuffers[i:], OpenBuffers[i+1:])
|
|
|
|
|
OpenBuffers[len(OpenBuffers)-1] = nil
|
|
|
|
|
OpenBuffers = OpenBuffers[:len(OpenBuffers)-1]
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-06-15 19:50:37 +00:00
|
|
|
// Fini should be called when a buffer is closed and performs
|
|
|
|
|
// some cleanup
|
|
|
|
|
func (b *Buffer) Fini() {
|
|
|
|
|
if !b.Modified() {
|
|
|
|
|
b.Serialize()
|
|
|
|
|
}
|
2019-12-22 00:55:23 +00:00
|
|
|
b.RemoveBackup()
|
2020-02-27 17:39:19 +00:00
|
|
|
|
|
|
|
|
if b.Type == BTStdout {
|
|
|
|
|
fmt.Fprint(util.Stdout, string(b.Bytes()))
|
|
|
|
|
}
|
2020-09-04 17:36:23 +00:00
|
|
|
|
|
|
|
|
atomic.StoreInt32(&(b.fini), int32(1))
|
2019-06-15 19:50:37 +00:00
|
|
|
}
|
|
|
|
|
|
2018-01-04 22:03:08 +00:00
|
|
|
// GetName returns the name that should be displayed in the statusline
|
|
|
|
|
// for this buffer
|
2016-11-20 00:07:51 +00:00
|
|
|
func (b *Buffer) GetName() string {
|
2020-02-24 18:48:37 +00:00
|
|
|
name := b.name
|
|
|
|
|
if name == "" {
|
2016-11-29 18:44:30 +00:00
|
|
|
if b.Path == "" {
|
|
|
|
|
return "No name"
|
|
|
|
|
}
|
2020-02-24 18:48:37 +00:00
|
|
|
name = b.Path
|
2016-11-20 00:07:51 +00:00
|
|
|
}
|
2020-02-24 18:48:37 +00:00
|
|
|
if b.Settings["basename"].(bool) {
|
2025-04-26 18:33:38 +00:00
|
|
|
return filepath.Base(name)
|
2020-02-24 18:48:37 +00:00
|
|
|
}
|
|
|
|
|
return name
|
2016-11-20 00:07:51 +00:00
|
|
|
}
|
|
|
|
|
|
2023-04-20 22:23:35 +00:00
|
|
|
// SetName changes the name for this buffer
|
2019-01-14 21:52:25 +00:00
|
|
|
func (b *Buffer) SetName(s string) {
|
|
|
|
|
b.name = s
|
|
|
|
|
}
|
|
|
|
|
|
2020-01-21 04:43:33 +00:00
|
|
|
// Insert inserts the given string of text at the start location
|
2019-01-17 03:32:33 +00:00
|
|
|
func (b *Buffer) Insert(start Loc, text string) {
|
2019-01-19 20:37:59 +00:00
|
|
|
if !b.Type.Readonly {
|
|
|
|
|
b.EventHandler.cursors = b.cursors
|
|
|
|
|
b.EventHandler.active = b.curCursor
|
|
|
|
|
b.EventHandler.Insert(start, text)
|
2019-12-22 00:55:23 +00:00
|
|
|
|
2020-06-22 21:54:56 +00:00
|
|
|
b.RequestBackup()
|
2019-01-19 20:37:59 +00:00
|
|
|
}
|
2019-01-14 21:56:10 +00:00
|
|
|
}
|
|
|
|
|
|
2020-01-21 04:43:33 +00:00
|
|
|
// Remove removes the characters between the start and end locations
|
2019-01-14 21:56:10 +00:00
|
|
|
func (b *Buffer) Remove(start, end Loc) {
|
2019-01-19 20:37:59 +00:00
|
|
|
if !b.Type.Readonly {
|
|
|
|
|
b.EventHandler.cursors = b.cursors
|
|
|
|
|
b.EventHandler.active = b.curCursor
|
|
|
|
|
b.EventHandler.Remove(start, end)
|
2019-12-22 00:55:23 +00:00
|
|
|
|
2020-06-22 21:54:56 +00:00
|
|
|
b.RequestBackup()
|
2019-01-19 20:37:59 +00:00
|
|
|
}
|
2019-01-14 21:56:10 +00:00
|
|
|
}
|
|
|
|
|
|
2016-08-25 01:29:23 +00:00
|
|
|
// FileType returns the buffer's filetype
|
|
|
|
|
func (b *Buffer) FileType() string {
|
|
|
|
|
return b.Settings["filetype"].(string)
|
2016-03-26 14:54:18 +00:00
|
|
|
}
|
|
|
|
|
|
2019-12-26 22:59:23 +00:00
|
|
|
// ExternallyModified returns whether the file being edited has
|
|
|
|
|
// been modified by some external process
|
|
|
|
|
func (b *Buffer) ExternallyModified() bool {
|
|
|
|
|
modTime, err := util.GetModTime(b.Path)
|
|
|
|
|
if err == nil {
|
|
|
|
|
return modTime != b.ModTime
|
|
|
|
|
}
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// UpdateModTime updates the modtime of this file
|
|
|
|
|
func (b *Buffer) UpdateModTime() (err error) {
|
|
|
|
|
b.ModTime, err = util.GetModTime(b.Path)
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
2016-05-30 21:48:33 +00:00
|
|
|
// ReOpen reloads the current buffer from disk
|
2018-08-26 03:06:44 +00:00
|
|
|
func (b *Buffer) ReOpen() error {
|
2019-01-24 00:06:20 +00:00
|
|
|
file, err := os.Open(b.Path)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return err
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
enc, err := htmlindex.Get(b.Settings["encoding"].(string))
|
|
|
|
|
if err != nil {
|
|
|
|
|
return err
|
|
|
|
|
}
|
|
|
|
|
|
2020-02-09 20:36:31 +00:00
|
|
|
reader := bufio.NewReader(transform.NewReader(file, enc.NewDecoder()))
|
2024-05-30 19:34:11 +00:00
|
|
|
data, err := io.ReadAll(reader)
|
2016-05-30 21:48:33 +00:00
|
|
|
txt := string(data)
|
|
|
|
|
|
|
|
|
|
if err != nil {
|
2018-08-26 03:06:44 +00:00
|
|
|
return err
|
2016-05-30 21:48:33 +00:00
|
|
|
}
|
2016-05-31 01:01:40 +00:00
|
|
|
b.EventHandler.ApplyDiff(txt)
|
2016-05-30 21:48:33 +00:00
|
|
|
|
2019-12-26 22:59:23 +00:00
|
|
|
err = b.UpdateModTime()
|
2020-02-12 18:18:59 +00:00
|
|
|
if !b.Settings["fastdirty"].(bool) {
|
2024-08-18 13:33:35 +00:00
|
|
|
if len(data) > LargeFileThreshold {
|
|
|
|
|
b.Settings["fastdirty"] = true
|
|
|
|
|
} else {
|
|
|
|
|
calcHash(b, &b.origHash)
|
|
|
|
|
}
|
2020-02-12 18:18:59 +00:00
|
|
|
}
|
2019-01-14 21:52:25 +00:00
|
|
|
b.isModified = false
|
2019-01-24 23:25:59 +00:00
|
|
|
b.RelocateCursors()
|
|
|
|
|
return err
|
|
|
|
|
}
|
|
|
|
|
|
2020-01-21 04:43:33 +00:00
|
|
|
// RelocateCursors relocates all cursors (makes sure they are in the buffer)
|
2019-01-24 23:25:59 +00:00
|
|
|
func (b *Buffer) RelocateCursors() {
|
2019-01-02 22:39:50 +00:00
|
|
|
for _, c := range b.cursors {
|
|
|
|
|
c.Relocate()
|
|
|
|
|
}
|
2016-03-17 21:27:57 +00:00
|
|
|
}
|
|
|
|
|
|
Fix various issues with `SpawnMultiCursor{Up,Down}` (#3145)
* SpawnMultiCursorUp/Down: change order of adding cursors
SpawnMultiCursor{Up,Down} currently works in a tricky way: instead of
creating a new cursor above or below, it moves the current "primary"
cursor above or below, and then creates a new cursor below or above the
new position of the current cursor (i.e. at its previous position),
creating an illusion for the user that the current (top-most or
bottom-most) cursor is a newly spawned cursor.
This trick causes at least the following issues:
- When the line above or below, where we spawn a new cursor, is shorter
than the current cursor position in the current line, the new cursor
is placed at the end of this short line (which is expected), but also
the current cursor unexpectedly changes its x position and moves
below/above the new cursor.
- When removing a cursor in RemoveMultiCursor (default Alt-p key), it
non-intuitively removes the cursor which, from the user point of view,
is not the last but the last-but-one cursor.
Fix these issues by replacing the trick with a straightforward logic:
just create the new cursor above or below the last one.
Note that this fix has a user-visible side effect: the last cursor is
no longer the "primary" one (since it is now the last in the list, not
the first), so e.g. when the user clears multicursors via Esc key, the
remaining cursor is the first one, not the last one. I assume it's ok.
* SpawnMultiCursorUp/Down: move common code to a helper fn
* SpawnMultiCursorUp/Down: honor visual width and LastVisualX
Make spawning multicursors up/down behave more similarly to cursor
movements up/down. This change fixes 2 issues at once:
- SpawnMultiCursorUp/Down doesn't take into account the visual width of
the text before the cursor, which may be different from its character
width (e.g. if it contains tabs). So e.g. if the number of tabs before
the cursor in the current line is not the same as in the new line, the
new cursor is placed at an unexpected location.
- SpawnMultiCursorUp/Down doesn't take into account the cursor's
remembered x position (LastVisualX) when e.g. spawning a new cursor
in the below line which is shorter than the current cursor position, and
then spawning yet another cursor in the next below line which is
longer than this short line.
* SpawnMultiCursorUp/Down: honor softwrap
When softwrap is enabled and the current line is wrapped, make
SpawnMultiCursor{Up,Down} spawn cursor in the next visual line within
this wrapped line, similarly to how we handle cursor movements up/down
within wrapped lines.
* SpawnMultiCursorUp/Down: deselect when spawning cursors
To avoid weird user experience (spawned cursors messing with selections
of existing cursors).
2024-03-04 21:23:50 +00:00
|
|
|
// DeselectCursors removes selection from all cursors
|
|
|
|
|
func (b *Buffer) DeselectCursors() {
|
|
|
|
|
for _, c := range b.cursors {
|
|
|
|
|
c.Deselect(true)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
// RuneAt returns the rune at a given location in the buffer
|
|
|
|
|
func (b *Buffer) RuneAt(loc Loc) rune {
|
|
|
|
|
line := b.LineBytes(loc.Y)
|
|
|
|
|
if len(line) > 0 {
|
|
|
|
|
i := 0
|
|
|
|
|
for len(line) > 0 {
|
2020-05-20 20:43:12 +00:00
|
|
|
r, _, size := util.DecodeCharacter(line)
|
2018-08-26 03:06:44 +00:00
|
|
|
line = line[size:]
|
|
|
|
|
|
|
|
|
|
if i == loc.X {
|
|
|
|
|
return r
|
|
|
|
|
}
|
2020-10-20 00:36:14 +00:00
|
|
|
|
|
|
|
|
i++
|
2018-08-26 03:06:44 +00:00
|
|
|
}
|
2016-06-07 15:43:28 +00:00
|
|
|
}
|
2018-08-26 03:06:44 +00:00
|
|
|
return '\n'
|
2016-05-07 14:57:40 +00:00
|
|
|
}
|
|
|
|
|
|
2021-04-07 20:20:39 +00:00
|
|
|
// WordAt returns the word around a given location in the buffer
|
|
|
|
|
func (b *Buffer) WordAt(loc Loc) []byte {
|
|
|
|
|
if len(b.LineBytes(loc.Y)) == 0 || !util.IsWordChar(b.RuneAt(loc)) {
|
|
|
|
|
return []byte{}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
start := loc
|
|
|
|
|
end := loc.Move(1, b)
|
|
|
|
|
|
|
|
|
|
for start.X > 0 && util.IsWordChar(b.RuneAt(start.Move(-1, b))) {
|
|
|
|
|
start.X--
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
lineLen := util.CharacterCount(b.LineBytes(loc.Y))
|
|
|
|
|
for end.X < lineLen && util.IsWordChar(b.RuneAt(end)) {
|
|
|
|
|
end.X++
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return b.Substr(start, end)
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-18 11:19:19 +00:00
|
|
|
// Shared returns if there are other buffers with the same file as this buffer
|
|
|
|
|
func (b *Buffer) Shared() bool {
|
|
|
|
|
for _, buf := range OpenBuffers {
|
|
|
|
|
if buf != b && buf.SharedBuffer == b.SharedBuffer {
|
|
|
|
|
return true
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
// Modified returns if this buffer has been modified since
|
|
|
|
|
// being opened
|
|
|
|
|
func (b *Buffer) Modified() bool {
|
2019-01-19 20:37:59 +00:00
|
|
|
if b.Type.Scratch {
|
|
|
|
|
return false
|
|
|
|
|
}
|
|
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
if b.Settings["fastdirty"].(bool) {
|
2019-01-14 21:52:25 +00:00
|
|
|
return b.isModified
|
2018-05-26 14:07:53 +00:00
|
|
|
}
|
|
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
var buff [md5.Size]byte
|
2016-10-12 04:44:49 +00:00
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
calcHash(b, &buff)
|
|
|
|
|
return buff != b.origHash
|
2016-10-12 04:44:49 +00:00
|
|
|
}
|
|
|
|
|
|
2020-06-24 21:19:42 +00:00
|
|
|
// Size returns the number of bytes in the current buffer
|
|
|
|
|
func (b *Buffer) Size() int {
|
|
|
|
|
nb := 0
|
|
|
|
|
for i := 0; i < b.LinesNum(); i++ {
|
|
|
|
|
nb += len(b.LineBytes(i))
|
|
|
|
|
|
|
|
|
|
if i != b.LinesNum()-1 {
|
|
|
|
|
if b.Endings == FFDos {
|
|
|
|
|
nb++ // carriage return
|
|
|
|
|
}
|
|
|
|
|
nb++ // newline
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return nb
|
|
|
|
|
}
|
|
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
// calcHash calculates md5 hash of all lines in the buffer
|
calcHash: Remove checking file size
Let calcHash() unconditionally hash whatever buffer it is asked to hash,
and let its callers explicitly check if the buffer is too large before
calling calcHash(). This makes things simpler and less error-prone
(no extra source of truth about whether the file is too large, we don't
need to remember to check if calcHash() fails, we can be sure calcHash()
will actually update the provided hash), and actually faster (since just
calculating the buffer size, i.e. adding line lengths, is faster than
md5 calculation).
In particular, this fixes the following bugs:
1. Since ReOpen() doesn't check calcHash() return value, if the reloaded
file is too large while the old version of the file is not,
calcHash() returns ErrFileTooLarge and doesn't update origHash, so
so Modified() returns true since the reloaded file's md5 sum doesn't
match the old origHash, so micro wrongly reports the newly reloaded
file as modified.
2. Since Modified() doesn't check calcHash() return value, Modified()
may return false positives or false negatives if the buffer has
*just* become too large so calcHash() returns ErrFileTooLarge and
doesn't update `buff`.
2024-08-18 13:10:07 +00:00
|
|
|
func calcHash(b *Buffer, out *[md5.Size]byte) {
|
2018-08-26 03:06:44 +00:00
|
|
|
h := md5.New()
|
2017-03-20 21:40:33 +00:00
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
if len(b.lines) > 0 {
|
calcHash: Remove checking file size
Let calcHash() unconditionally hash whatever buffer it is asked to hash,
and let its callers explicitly check if the buffer is too large before
calling calcHash(). This makes things simpler and less error-prone
(no extra source of truth about whether the file is too large, we don't
need to remember to check if calcHash() fails, we can be sure calcHash()
will actually update the provided hash), and actually faster (since just
calculating the buffer size, i.e. adding line lengths, is faster than
md5 calculation).
In particular, this fixes the following bugs:
1. Since ReOpen() doesn't check calcHash() return value, if the reloaded
file is too large while the old version of the file is not,
calcHash() returns ErrFileTooLarge and doesn't update origHash, so
so Modified() returns true since the reloaded file's md5 sum doesn't
match the old origHash, so micro wrongly reports the newly reloaded
file as modified.
2. Since Modified() doesn't check calcHash() return value, Modified()
may return false positives or false negatives if the buffer has
*just* become too large so calcHash() returns ErrFileTooLarge and
doesn't update `buff`.
2024-08-18 13:10:07 +00:00
|
|
|
h.Write(b.lines[0].data)
|
2017-09-04 19:47:24 +00:00
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
for _, l := range b.lines[1:] {
|
2024-08-17 14:56:15 +00:00
|
|
|
if b.Endings == FFDos {
|
calcHash: Remove checking file size
Let calcHash() unconditionally hash whatever buffer it is asked to hash,
and let its callers explicitly check if the buffer is too large before
calling calcHash(). This makes things simpler and less error-prone
(no extra source of truth about whether the file is too large, we don't
need to remember to check if calcHash() fails, we can be sure calcHash()
will actually update the provided hash), and actually faster (since just
calculating the buffer size, i.e. adding line lengths, is faster than
md5 calculation).
In particular, this fixes the following bugs:
1. Since ReOpen() doesn't check calcHash() return value, if the reloaded
file is too large while the old version of the file is not,
calcHash() returns ErrFileTooLarge and doesn't update origHash, so
so Modified() returns true since the reloaded file's md5 sum doesn't
match the old origHash, so micro wrongly reports the newly reloaded
file as modified.
2. Since Modified() doesn't check calcHash() return value, Modified()
may return false positives or false negatives if the buffer has
*just* become too large so calcHash() returns ErrFileTooLarge and
doesn't update `buff`.
2024-08-18 13:10:07 +00:00
|
|
|
h.Write([]byte{'\r', '\n'})
|
2024-08-17 14:56:15 +00:00
|
|
|
} else {
|
calcHash: Remove checking file size
Let calcHash() unconditionally hash whatever buffer it is asked to hash,
and let its callers explicitly check if the buffer is too large before
calling calcHash(). This makes things simpler and less error-prone
(no extra source of truth about whether the file is too large, we don't
need to remember to check if calcHash() fails, we can be sure calcHash()
will actually update the provided hash), and actually faster (since just
calculating the buffer size, i.e. adding line lengths, is faster than
md5 calculation).
In particular, this fixes the following bugs:
1. Since ReOpen() doesn't check calcHash() return value, if the reloaded
file is too large while the old version of the file is not,
calcHash() returns ErrFileTooLarge and doesn't update origHash, so
so Modified() returns true since the reloaded file's md5 sum doesn't
match the old origHash, so micro wrongly reports the newly reloaded
file as modified.
2. Since Modified() doesn't check calcHash() return value, Modified()
may return false positives or false negatives if the buffer has
*just* become too large so calcHash() returns ErrFileTooLarge and
doesn't update `buff`.
2024-08-18 13:10:07 +00:00
|
|
|
h.Write([]byte{'\n'})
|
2024-08-17 14:56:15 +00:00
|
|
|
}
|
calcHash: Remove checking file size
Let calcHash() unconditionally hash whatever buffer it is asked to hash,
and let its callers explicitly check if the buffer is too large before
calling calcHash(). This makes things simpler and less error-prone
(no extra source of truth about whether the file is too large, we don't
need to remember to check if calcHash() fails, we can be sure calcHash()
will actually update the provided hash), and actually faster (since just
calculating the buffer size, i.e. adding line lengths, is faster than
md5 calculation).
In particular, this fixes the following bugs:
1. Since ReOpen() doesn't check calcHash() return value, if the reloaded
file is too large while the old version of the file is not,
calcHash() returns ErrFileTooLarge and doesn't update origHash, so
so Modified() returns true since the reloaded file's md5 sum doesn't
match the old origHash, so micro wrongly reports the newly reloaded
file as modified.
2. Since Modified() doesn't check calcHash() return value, Modified()
may return false positives or false negatives if the buffer has
*just* become too large so calcHash() returns ErrFileTooLarge and
doesn't update `buff`.
2024-08-18 13:10:07 +00:00
|
|
|
h.Write(l.data)
|
2018-08-26 03:06:44 +00:00
|
|
|
}
|
2017-09-04 19:47:24 +00:00
|
|
|
}
|
2018-01-07 20:50:08 +00:00
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
h.Sum((*out)[:0])
|
2018-01-07 20:50:08 +00:00
|
|
|
}
|
|
|
|
|
|
2024-04-14 16:01:12 +00:00
|
|
|
func parseDefFromFile(f config.RuntimeFile, header *highlight.Header) *highlight.Def {
|
|
|
|
|
data, err := f.Data()
|
|
|
|
|
if err != nil {
|
|
|
|
|
screen.TermMessage("Error loading syntax file " + f.Name() + ": " + err.Error())
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if header == nil {
|
|
|
|
|
header, err = highlight.MakeHeaderYaml(data)
|
|
|
|
|
if err != nil {
|
|
|
|
|
screen.TermMessage("Error parsing header for syntax file " + f.Name() + ": " + err.Error())
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
file, err := highlight.ParseFile(data)
|
|
|
|
|
if err != nil {
|
|
|
|
|
screen.TermMessage("Error parsing syntax file " + f.Name() + ": " + err.Error())
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
syndef, err := highlight.ParseDef(file, header)
|
|
|
|
|
if err != nil {
|
|
|
|
|
screen.TermMessage("Error parsing syntax file " + f.Name() + ": " + err.Error())
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return syndef
|
|
|
|
|
}
|
|
|
|
|
|
2024-04-14 16:13:24 +00:00
|
|
|
// findRealRuntimeSyntaxDef finds a specific syntax definition
|
|
|
|
|
// in the user's custom syntax files
|
|
|
|
|
func findRealRuntimeSyntaxDef(name string, header *highlight.Header) *highlight.Def {
|
|
|
|
|
for _, f := range config.ListRealRuntimeFiles(config.RTSyntax) {
|
|
|
|
|
if f.Name() == name {
|
|
|
|
|
syndef := parseDefFromFile(f, header)
|
|
|
|
|
if syndef != nil {
|
|
|
|
|
return syndef
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// findRuntimeSyntaxDef finds a specific syntax definition
|
2024-04-18 22:10:58 +00:00
|
|
|
// in the built-in syntax files
|
2024-04-14 16:13:24 +00:00
|
|
|
func findRuntimeSyntaxDef(name string, header *highlight.Header) *highlight.Def {
|
|
|
|
|
for _, f := range config.ListRuntimeFiles(config.RTSyntax) {
|
|
|
|
|
if f.Name() == name {
|
|
|
|
|
syndef := parseDefFromFile(f, header)
|
|
|
|
|
if syndef != nil {
|
|
|
|
|
return syndef
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
2024-04-21 13:13:03 +00:00
|
|
|
func resolveIncludes(syndef *highlight.Def) {
|
2024-04-21 13:14:21 +00:00
|
|
|
includes := highlight.GetIncludes(syndef)
|
|
|
|
|
if len(includes) == 0 {
|
2024-04-21 13:13:03 +00:00
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
var files []*highlight.File
|
|
|
|
|
for _, f := range config.ListRuntimeFiles(config.RTSyntax) {
|
|
|
|
|
data, err := f.Data()
|
|
|
|
|
if err != nil {
|
|
|
|
|
screen.TermMessage("Error loading syntax file " + f.Name() + ": " + err.Error())
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
header, err := highlight.MakeHeaderYaml(data)
|
|
|
|
|
if err != nil {
|
|
|
|
|
screen.TermMessage("Error parsing syntax file " + f.Name() + ": " + err.Error())
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for _, i := range includes {
|
|
|
|
|
if header.FileType == i {
|
|
|
|
|
file, err := highlight.ParseFile(data)
|
|
|
|
|
if err != nil {
|
|
|
|
|
screen.TermMessage("Error parsing syntax file " + f.Name() + ": " + err.Error())
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
files = append(files, file)
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if len(files) >= len(includes) {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
highlight.ResolveIncludes(syndef, files)
|
|
|
|
|
}
|
|
|
|
|
|
2018-08-26 03:06:44 +00:00
|
|
|
// UpdateRules updates the syntax rules and filetype for this buffer
// This is called when the colorscheme changes
//
// Detection proceeds in stages:
//  1. the user's custom syntax files are scanned for a filetype,
//     filename, or file-header (first line) match;
//  2. if no definitive match was found, the built-in syntax header
//     files are scanned the same way;
//  3. filename matches take precedence over header matches, and when
//     several candidates remain, file signatures are used to pick one;
//  4. if nothing matched, the "default" definition is used.
func (b *Buffer) UpdateRules() {
	if !b.Type.Syntax {
		return
	}
	ft := b.Settings["filetype"].(string)
	if ft == "off" {
		// highlighting explicitly disabled: drop any existing matches
		b.ClearMatches()
		b.SyntaxDef = nil
		return
	}

	b.SyntaxDef = nil

	// syntaxFileInfo is an internal helper structure
	// to store properties of one single syntax file
	type syntaxFileInfo struct {
		header    *highlight.Header
		fileName  string
		syntaxDef *highlight.Def
	}

	// candidates matched by filename pattern and by first-line header
	// pattern, respectively; filename matches take precedence
	fnameMatches := []syntaxFileInfo{}
	headerMatches := []syntaxFileInfo{}
	syntaxFile := ""
	foundDef := false
	var header *highlight.Header
	// search for the syntax file in the user's custom syntax files
	for _, f := range config.ListRealRuntimeFiles(config.RTSyntax) {
		// the user's "default" definition is only a fallback, never a match
		if f.Name() == "default" {
			continue
		}

		data, err := f.Data()
		if err != nil {
			screen.TermMessage("Error loading syntax file " + f.Name() + ": " + err.Error())
			continue
		}

		header, err = highlight.MakeHeaderYaml(data)
		if err != nil {
			screen.TermMessage("Error parsing header for syntax file " + f.Name() + ": " + err.Error())
			continue
		}

		matchedFileType := false
		matchedFileName := false
		matchedFileHeader := false

		if ft == "unknown" || ft == "" {
			// no explicit filetype setting: detect from the file name
			// and, only while no filename match exists yet, from the
			// buffer's first line
			if header.MatchFileName(b.Path) {
				matchedFileName = true
			}
			if len(fnameMatches) == 0 && header.MatchFileHeader(b.lines[0].data) {
				matchedFileHeader = true
			}
		} else if header.FileType == ft {
			// the filetype was set explicitly; an exact match wins outright
			matchedFileType = true
		}

		if matchedFileType || matchedFileName || matchedFileHeader {
			file, err := highlight.ParseFile(data)
			if err != nil {
				screen.TermMessage("Error parsing syntax file " + f.Name() + ": " + err.Error())
				continue
			}

			syndef, err := highlight.ParseDef(file, header)
			if err != nil {
				screen.TermMessage("Error parsing syntax file " + f.Name() + ": " + err.Error())
				continue
			}

			if matchedFileType {
				// explicit filetype match: use it and stop searching
				b.SyntaxDef = syndef
				syntaxFile = f.Name()
				foundDef = true
				break
			}

			if matchedFileName {
				fnameMatches = append(fnameMatches, syntaxFileInfo{header, f.Name(), syndef})
			} else if matchedFileHeader {
				headerMatches = append(headerMatches, syntaxFileInfo{header, f.Name(), syndef})
			}
		}
	}

	if !foundDef {
		// search for the syntax file in the built-in syntax files
		for _, f := range config.ListRuntimeFiles(config.RTSyntaxHeader) {
			data, err := f.Data()
			if err != nil {
				screen.TermMessage("Error loading syntax header file " + f.Name() + ": " + err.Error())
				continue
			}

			header, err = highlight.MakeHeader(data)
			if err != nil {
				screen.TermMessage("Error reading syntax header file", f.Name(), err)
				continue
			}

			if ft == "unknown" || ft == "" {
				// built-in candidates carry a nil syntaxDef; the full
				// definition is loaded later via findRuntimeSyntaxDef
				if header.MatchFileName(b.Path) {
					fnameMatches = append(fnameMatches, syntaxFileInfo{header, f.Name(), nil})
				}
				if len(fnameMatches) == 0 && header.MatchFileHeader(b.lines[0].data) {
					headerMatches = append(headerMatches, syntaxFileInfo{header, f.Name(), nil})
				}
			} else if header.FileType == ft {
				syntaxFile = f.Name()
				break
			}
		}
	}

	if syntaxFile == "" {
		// prefer filename matches; fall back to header matches only
		// when there are none
		matches := fnameMatches
		if len(matches) == 0 {
			matches = headerMatches
		}

		length := len(matches)
		if length > 0 {
			signatureMatch := false
			if length > 1 {
				// multiple matching syntax files found, try to resolve the ambiguity
				// using signatures
				detectlimit := util.IntOpt(b.Settings["detectlimit"])
				lineCount := len(b.lines)
				limit := lineCount
				// cap the number of scanned lines at the detectlimit setting
				if detectlimit > 0 && lineCount > detectlimit {
					limit = detectlimit
				}

			matchLoop:
				for _, m := range matches {
					if m.header.HasFileSignature() {
						for i := 0; i < limit; i++ {
							if m.header.MatchFileSignature(b.lines[i].data) {
								syntaxFile = m.fileName
								if m.syntaxDef != nil {
									b.SyntaxDef = m.syntaxDef
									foundDef = true
								}
								header = m.header
								signatureMatch = true
								break matchLoop
							}
						}
					}
				}
			}
			// a single candidate, or no signature resolved the tie:
			// take the first match
			if length == 1 || !signatureMatch {
				syntaxFile = matches[0].fileName
				if matches[0].syntaxDef != nil {
					b.SyntaxDef = matches[0].syntaxDef
					foundDef = true
				}
				header = matches[0].header
			}
		}
	}

	if syntaxFile != "" && !foundDef {
		// we found a syntax file using a syntax header file
		b.SyntaxDef = findRuntimeSyntaxDef(syntaxFile, header)
	}

	if b.SyntaxDef != nil {
		b.Settings["filetype"] = b.SyntaxDef.FileType
	} else {
		// search for the default file in the user's custom syntax files
		b.SyntaxDef = findRealRuntimeSyntaxDef("default", nil)
		if b.SyntaxDef == nil {
			// search for the default file in the built-in syntax files
			b.SyntaxDef = findRuntimeSyntaxDef("default", nil)
		}
	}

	if b.SyntaxDef != nil {
		resolveIncludes(b.SyntaxDef)
	}

	if b.SyntaxDef != nil {
		b.Highlighter = highlight.NewHighlighter(b.SyntaxDef)
		if b.Settings["syntax"].(bool) {
			// highlight asynchronously and redraw when finished
			go func() {
				b.Highlighter.HighlightStates(b)
				b.Highlighter.HighlightMatches(b, 0, b.End().Y)
				screen.Redraw()
			}()
		}
	}
}
|
2018-09-03 20:54:56 +00:00
|
|
|
|
2019-01-14 02:06:58 +00:00
|
|
|
// ClearMatches clears all of the syntax highlighting for the buffer
|
|
|
|
|
func (b *Buffer) ClearMatches() {
|
|
|
|
|
for i := range b.lines {
|
|
|
|
|
b.SetMatch(i, nil)
|
|
|
|
|
b.SetState(i, nil)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-31 19:46:04 +00:00
|
|
|
// IndentString returns this buffer's indent method (a tabstop or n spaces
|
|
|
|
|
// depending on the settings)
|
2019-01-17 03:32:33 +00:00
|
|
|
func (b *Buffer) IndentString(tabsize int) string {
|
2018-09-03 20:54:56 +00:00
|
|
|
if b.Settings["tabstospaces"].(bool) {
|
2019-12-26 22:59:23 +00:00
|
|
|
return util.Spaces(tabsize)
|
2018-09-03 20:54:56 +00:00
|
|
|
}
|
2019-01-17 03:32:33 +00:00
|
|
|
return "\t"
|
2018-09-03 20:54:56 +00:00
|
|
|
}
|
2019-01-02 22:39:50 +00:00
|
|
|
|
2019-01-03 04:26:40 +00:00
|
|
|
// SetCursors resets this buffer's cursors to a new list
|
|
|
|
|
func (b *Buffer) SetCursors(c []*Cursor) {
|
|
|
|
|
b.cursors = c
|
2019-01-15 05:24:53 +00:00
|
|
|
b.EventHandler.cursors = b.cursors
|
2019-01-16 22:52:30 +00:00
|
|
|
b.EventHandler.active = b.curCursor
|
2019-01-03 04:26:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// AddCursor adds a new cursor to the list
|
|
|
|
|
func (b *Buffer) AddCursor(c *Cursor) {
|
|
|
|
|
b.cursors = append(b.cursors, c)
|
2019-01-15 05:24:53 +00:00
|
|
|
b.EventHandler.cursors = b.cursors
|
2019-01-16 22:52:30 +00:00
|
|
|
b.EventHandler.active = b.curCursor
|
2019-01-03 04:26:40 +00:00
|
|
|
b.UpdateCursors()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// SetCurCursor sets the current cursor
|
|
|
|
|
func (b *Buffer) SetCurCursor(n int) {
|
|
|
|
|
b.curCursor = n
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// GetActiveCursor returns the main cursor in this buffer
|
|
|
|
|
func (b *Buffer) GetActiveCursor() *Cursor {
|
|
|
|
|
return b.cursors[b.curCursor]
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// GetCursor returns the nth cursor
|
|
|
|
|
func (b *Buffer) GetCursor(n int) *Cursor {
|
|
|
|
|
return b.cursors[n]
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// GetCursors returns the list of cursors in this buffer
|
|
|
|
|
func (b *Buffer) GetCursors() []*Cursor {
|
|
|
|
|
return b.cursors
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// NumCursors returns the number of cursors
|
|
|
|
|
func (b *Buffer) NumCursors() int {
|
|
|
|
|
return len(b.cursors)
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-02 22:39:50 +00:00
|
|
|
// MergeCursors merges any cursors that are at the same position
// into one cursor
func (b *Buffer) MergeCursors() {
	var cursors []*Cursor
	// For each surviving cursor, nil out every other cursor at the same
	// location, keeping the first cursor at each location. O(n^2), which
	// is fine for the typically small number of cursors.
	for i := 0; i < len(b.cursors); i++ {
		c1 := b.cursors[i]
		if c1 != nil {
			for j := 0; j < len(b.cursors); j++ {
				c2 := b.cursors[j]
				if c2 != nil && i != j && c1.Loc == c2.Loc {
					b.cursors[j] = nil
				}
			}
			cursors = append(cursors, c1)
		}
	}

	b.cursors = cursors

	// Renumber the remaining cursors to match their slice indices.
	for i := range b.cursors {
		b.cursors[i].Num = i
	}

	// Keep the active cursor index valid for the (possibly shrunken) list.
	if b.curCursor >= len(b.cursors) {
		b.curCursor = len(b.cursors) - 1
	}
	b.EventHandler.cursors = b.cursors
	b.EventHandler.active = b.curCursor
}
|
|
|
|
|
|
2024-04-08 10:04:38 +00:00
|
|
|
// UpdateCursors updates all the cursors indices
|
2019-01-02 22:39:50 +00:00
|
|
|
func (b *Buffer) UpdateCursors() {
|
2019-01-16 22:52:30 +00:00
|
|
|
b.EventHandler.cursors = b.cursors
|
|
|
|
|
b.EventHandler.active = b.curCursor
|
2019-01-02 22:39:50 +00:00
|
|
|
for i, c := range b.cursors {
|
|
|
|
|
c.Num = i
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-03 04:26:40 +00:00
|
|
|
// RemoveCursor removes the cursor with index i from the cursor list and
// clamps the active cursor index into the remaining range.
func (b *Buffer) RemoveCursor(i int) {
	copy(b.cursors[i:], b.cursors[i+1:])
	// Zero the vacated tail slot so the removed cursor can be collected.
	b.cursors[len(b.cursors)-1] = nil
	b.cursors = b.cursors[:len(b.cursors)-1]
	b.curCursor = util.Clamp(b.curCursor, 0, len(b.cursors)-1)
	b.UpdateCursors()
}
|
|
|
|
|
|
2019-01-02 22:39:50 +00:00
|
|
|
// ClearCursors removes all extra cursors
func (b *Buffer) ClearCursors() {
	// Nil out the extra cursors so they can be garbage collected before
	// truncating the slice to the single remaining (first) cursor.
	for i := 1; i < len(b.cursors); i++ {
		b.cursors[i] = nil
	}
	b.cursors = b.cursors[:1]
	b.UpdateCursors()
	// NOTE(review): curCursor is reset after UpdateCursors, so
	// EventHandler.active keeps the previous index here — confirm intended.
	b.curCursor = 0
	b.GetActiveCursor().Deselect(true)
}
|
2019-01-15 03:38:59 +00:00
|
|
|
|
|
|
|
|
// MoveLinesUp moves the range of lines up one row
func (b *Buffer) MoveLinesUp(start int, end int) {
	// The range is [start, end); moving up needs a line above the range,
	// so start must be at least 1 and end must stay within the buffer.
	if start < 1 || start >= end || end > len(b.lines) {
		return
	}
	// Save the line above the range; it will be re-inserted below it.
	l := string(b.LineBytes(start - 1))
	if end == len(b.lines) {
		// The last buffer line has no trailing newline; append one so the
		// saved line can be inserted at row `end` below.
		b.insert(
			Loc{
				util.CharacterCount(b.lines[end-1].data),
				end - 1,
			},
			[]byte{'\n'},
		)
	}
	// Insert the saved line below the range, then delete its original copy
	// above the range, effectively shifting the whole range up one row.
	b.Insert(
		Loc{0, end},
		l+"\n",
	)
	b.Remove(
		Loc{0, start - 1},
		Loc{0, start},
	)
}
|
|
|
|
|
|
|
|
|
|
// MoveLinesDown moves the range of lines down one row
func (b *Buffer) MoveLinesDown(start int, end int) {
	// The range is [start, end); moving down needs a line below the range,
	// so end must be strictly inside the buffer.
	if start < 0 || start >= end || end >= len(b.lines) {
		return
	}
	// Copy the line below the range to above the range, then delete the
	// original copy (whose row shifted down by one after the insert).
	l := string(b.LineBytes(end))
	b.Insert(
		Loc{0, start},
		l+"\n",
	)
	end++
	b.Remove(
		Loc{0, end},
		Loc{0, end + 1},
	)
}
|
|
|
|
|
|
|
|
|
|
// BracePairs is the set of (open, close) brace pairs that micro matches
// against when looking for a matching brace.
var BracePairs = [][2]rune{
	{'(', ')'},
	{'{', '}'},
	{'[', ']'},
}
|
|
|
|
|
|
2024-06-04 22:56:19 +00:00
|
|
|
// findMatchingBrace returns the location of the brace matching the brace
// character `char` at `start`: it scans forward when char is the opening
// brace of the pair and backward when it is the closing one. The second
// return value reports whether a match was found; if not, `start` is
// returned unchanged.
func (b *Buffer) findMatchingBrace(braceType [2]rune, start Loc, char rune) (Loc, bool) {
	var i int // nesting depth of braces of this pair
	if char == braceType[0] {
		// Opening brace: scan forward for the matching close brace.
		for y := start.Y; y < b.LinesNum(); y++ {
			l := []rune(string(b.LineBytes(y)))
			xInit := 0
			if y == start.Y {
				// On the starting line, begin at the brace itself.
				xInit = start.X
			}
			for x := xInit; x < len(l); x++ {
				r := l[x]
				if r == braceType[0] {
					i++
				} else if r == braceType[1] {
					i--
					if i == 0 {
						// Depth back to zero: this close brace matches.
						return Loc{x, y}, true
					}
				}
			}
		}
	} else if char == braceType[1] {
		// Closing brace: scan backward for the matching open brace.
		for y := start.Y; y >= 0; y-- {
			l := []rune(string(b.lines[y].data))
			xInit := len(l) - 1
			if y == start.Y {
				xInit = start.X
			}
			for x := xInit; x >= 0; x-- {
				r := l[x]
				if r == braceType[1] {
					i++
				} else if r == braceType[0] {
					i--
					if i == 0 {
						return Loc{x, y}, true
					}
				}
			}
		}
	}
	return start, false
}
|
|
|
|
|
|
|
|
|
|
// If there is a brace character (for example '{' or ']') at the given start location,
// FindMatchingBrace returns the location of the matching brace for it (for example '}'
// or '['). The second returned value is true if there was no matching brace found
// for given starting location but it was found for the location one character left
// of it. The third returned value is true if the matching brace was found at all.
func (b *Buffer) FindMatchingBrace(start Loc) (Loc, bool, bool) {
	// TODO: maybe can be more efficient with utf8 package
	curLine := []rune(string(b.LineBytes(start.Y)))

	// first try to find matching brace for the given location (it has higher priority)
	if start.X >= 0 && start.X < len(curLine) {
		startChar := curLine[start.X]

		// Only characters belonging to a known brace pair are considered.
		for _, bp := range BracePairs {
			if startChar == bp[0] || startChar == bp[1] {
				mb, found := b.findMatchingBrace(bp, start, startChar)
				if found {
					return mb, false, true
				}
			}
		}
	}

	// The "matchbraceleft" option additionally matches the brace to the
	// left of the cursor, simulating I-beam cursor behavior.
	if b.Settings["matchbraceleft"].(bool) {
		// failed to find matching brace for the given location, so try to find matching
		// brace for the location one character left of it
		if start.X-1 >= 0 && start.X-1 < len(curLine) {
			leftChar := curLine[start.X-1]
			left := Loc{start.X - 1, start.Y}

			for _, bp := range BracePairs {
				if leftChar == bp[0] || leftChar == bp[1] {
					mb, found := b.findMatchingBrace(bp, left, leftChar)
					if found {
						return mb, true, true
					}
				}
			}
		}
	}

	return start, false, false
}
|
2019-01-15 03:44:06 +00:00
|
|
|
|
|
|
|
|
// Retab changes all tabs to spaces or vice versa
func (b *Buffer) Retab() {
	toSpaces := b.Settings["tabstospaces"].(bool)
	tabsize := util.IntOpt(b.Settings["tabsize"])
	dirty := false

	for i := 0; i < b.LinesNum(); i++ {
		l := b.LineBytes(i)

		// Only leading whitespace is converted; tabs or spaces in the
		// middle of a line are left untouched.
		ws := util.GetLeadingWhitespace(l)
		if len(ws) != 0 {
			if toSpaces {
				ws = bytes.ReplaceAll(ws, []byte{'\t'}, bytes.Repeat([]byte{' '}, tabsize))
			} else {
				ws = bytes.ReplaceAll(ws, bytes.Repeat([]byte{' '}, tabsize), []byte{'\t'})
			}
		}

		// Strip the old leading whitespace and prepend the converted form.
		l = bytes.TrimLeft(l, " \t")

		// Lock around the direct mutation of the line data.
		b.Lock()
		b.lines[i].data = append(ws, l...)
		b.Unlock()

		b.MarkModified(i, i)
		dirty = true
	}

	b.isModified = dirty
}
|
2019-06-15 22:22:36 +00:00
|
|
|
|
|
|
|
|
// ParseCursorLocation turns a cursor location like 10:5 (LINE:COL)
|
|
|
|
|
// into a loc
|
|
|
|
|
func ParseCursorLocation(cursorPositions []string) (Loc, error) {
|
|
|
|
|
startpos := Loc{0, 0}
|
|
|
|
|
var err error
|
|
|
|
|
|
|
|
|
|
// if no positions are available exit early
|
|
|
|
|
if cursorPositions == nil {
|
|
|
|
|
return startpos, errors.New("No cursor positions were provided.")
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
startpos.Y, err = strconv.Atoi(cursorPositions[0])
|
2020-02-02 19:20:39 +00:00
|
|
|
startpos.Y--
|
2019-06-15 22:22:36 +00:00
|
|
|
if err == nil {
|
|
|
|
|
if len(cursorPositions) > 1 {
|
|
|
|
|
startpos.X, err = strconv.Atoi(cursorPositions[1])
|
|
|
|
|
if startpos.X > 0 {
|
2020-02-02 19:20:39 +00:00
|
|
|
startpos.X--
|
2019-06-15 22:22:36 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return startpos, err
|
|
|
|
|
}
|
2019-08-02 21:48:59 +00:00
|
|
|
|
2019-12-26 22:59:23 +00:00
|
|
|
// Line returns the string representation of the given line number
|
2019-08-02 21:48:59 +00:00
|
|
|
func (b *Buffer) Line(i int) string {
|
|
|
|
|
return string(b.LineBytes(i))
|
|
|
|
|
}
|
2019-08-06 03:43:34 +00:00
|
|
|
|
2020-02-02 19:20:39 +00:00
|
|
|
func (b *Buffer) Write(bytes []byte) (n int, err error) {
|
|
|
|
|
b.EventHandler.InsertBytes(b.End(), bytes)
|
|
|
|
|
return len(bytes), nil
|
|
|
|
|
}
|
|
|
|
|
|
2024-05-12 18:35:07 +00:00
|
|
|
func (b *Buffer) updateDiff(synchronous bool) {
|
2020-02-08 07:56:24 +00:00
|
|
|
b.diffLock.Lock()
|
|
|
|
|
defer b.diffLock.Unlock()
|
|
|
|
|
|
|
|
|
|
b.diff = make(map[int]DiffStatus)
|
|
|
|
|
|
|
|
|
|
if b.diffBase == nil {
|
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
differ := dmp.New()
|
2024-05-12 18:35:07 +00:00
|
|
|
|
|
|
|
|
if !synchronous {
|
|
|
|
|
b.Lock()
|
|
|
|
|
}
|
|
|
|
|
bytes := b.Bytes()
|
|
|
|
|
if !synchronous {
|
|
|
|
|
b.Unlock()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
baseRunes, bufferRunes, _ := differ.DiffLinesToRunes(string(b.diffBase), string(bytes))
|
2020-02-08 07:56:24 +00:00
|
|
|
diffs := differ.DiffMainRunes(baseRunes, bufferRunes, false)
|
|
|
|
|
lineN := 0
|
|
|
|
|
|
|
|
|
|
for _, diff := range diffs {
|
|
|
|
|
lineCount := len([]rune(diff.Text))
|
|
|
|
|
|
|
|
|
|
switch diff.Type {
|
|
|
|
|
case dmp.DiffEqual:
|
|
|
|
|
lineN += lineCount
|
|
|
|
|
case dmp.DiffInsert:
|
|
|
|
|
var status DiffStatus
|
|
|
|
|
if b.diff[lineN] == DSDeletedAbove {
|
|
|
|
|
status = DSModified
|
|
|
|
|
} else {
|
|
|
|
|
status = DSAdded
|
|
|
|
|
}
|
|
|
|
|
for i := 0; i < lineCount; i++ {
|
|
|
|
|
b.diff[lineN] = status
|
|
|
|
|
lineN++
|
|
|
|
|
}
|
|
|
|
|
case dmp.DiffDelete:
|
|
|
|
|
b.diff[lineN] = DSDeletedAbove
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// UpdateDiff computes the diff between the diff base and the buffer content.
// The update may be performed synchronously or asynchronously.
// If an asynchronous update is already pending when UpdateDiff is called,
// UpdateDiff does not schedule another update.
func (b *Buffer) UpdateDiff() {
	if b.updateDiffTimer != nil {
		return
	}

	// Size the work estimate by the larger of the buffer's and the diff
	// base's line counts.
	lineCount := b.LinesNum()
	if b.diffBaseLineCount > lineCount {
		lineCount = b.diffBaseLineCount
	}

	if lineCount < 1000 {
		// Small file: compute the diff immediately.
		b.updateDiff(true)
	} else if lineCount < 30000 {
		// Medium file: debounce for 500ms and diff in the background.
		// NOTE(review): updateDiffTimer is written here and cleared from
		// the timer goroutine without synchronization — confirm safe.
		b.updateDiffTimer = time.AfterFunc(500*time.Millisecond, func() {
			b.updateDiffTimer = nil
			b.updateDiff(false)
			screen.Redraw()
		})
	} else {
		// Don't compute diffs for very large files
		b.diffLock.Lock()
		b.diff = make(map[int]DiffStatus)
		b.diffLock.Unlock()
	}
}
|
|
|
|
|
|
|
|
|
|
// SetDiffBase sets the text that is used as the base for diffing the buffer content
|
|
|
|
|
func (b *Buffer) SetDiffBase(diffBase []byte) {
|
|
|
|
|
b.diffBase = diffBase
|
|
|
|
|
if diffBase == nil {
|
|
|
|
|
b.diffBaseLineCount = 0
|
|
|
|
|
} else {
|
|
|
|
|
b.diffBaseLineCount = strings.Count(string(diffBase), "\n")
|
|
|
|
|
}
|
2024-05-12 18:02:51 +00:00
|
|
|
b.UpdateDiff()
|
2020-02-08 07:56:24 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// DiffStatus returns the diff status for a line in the buffer
|
|
|
|
|
func (b *Buffer) DiffStatus(lineN int) DiffStatus {
|
|
|
|
|
b.diffLock.RLock()
|
|
|
|
|
defer b.diffLock.RUnlock()
|
|
|
|
|
// Note that the zero value for DiffStatus is equal to DSUnchanged
|
|
|
|
|
return b.diff[lineN]
|
|
|
|
|
}
|
|
|
|
|
|
2023-04-20 22:23:35 +00:00
|
|
|
// FindNextDiffLine returns the line number of the next block of diffs.
// If `startLine` is already in a block of diffs, lines in that block are skipped.
func (b *Buffer) FindNextDiffLine(startLine int, forward bool) (int, error) {
	if b.diff == nil {
		return 0, errors.New("no diff data")
	}
	// NOTE(review): b.diff is read here without holding b.diffLock —
	// confirm callers only invoke this from the main thread.
	startStatus, ok := b.diff[startLine]
	if !ok {
		// Lines absent from the map are unchanged.
		startStatus = DSUnchanged
	}
	curLine := startLine
	for {
		curStatus, ok := b.diff[curLine]
		if !ok {
			curStatus = DSUnchanged
		}
		// Walked off either end of the buffer without finding a hunk.
		if curLine < 0 || curLine > b.LinesNum() {
			return 0, errors.New("no next diff hunk")
		}
		if curStatus != startStatus {
			if startStatus != DSUnchanged && curStatus == DSUnchanged {
				// Skip over the block of unchanged text
				startStatus = DSUnchanged
			} else {
				return curLine, nil
			}
		}
		if forward {
			curLine++
		} else {
			curLine--
		}
	}
}
|
|
|
|
|
|
2021-09-28 20:39:03 +00:00
|
|
|
// SearchMatch returns true if the given location is within a match of the last search.
|
|
|
|
|
// It is used for search highlighting
|
|
|
|
|
func (b *Buffer) SearchMatch(pos Loc) bool {
|
|
|
|
|
return b.LineArray.SearchMatch(b, pos)
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-26 22:59:23 +00:00
|
|
|
// WriteLog writes a string to the log buffer
|
2019-08-06 03:43:34 +00:00
|
|
|
func WriteLog(s string) {
|
|
|
|
|
LogBuf.EventHandler.Insert(LogBuf.End(), s)
|
|
|
|
|
}
|
2020-01-16 03:25:08 +00:00
|
|
|
|
|
|
|
|
// GetLogBuf returns the log buffer
|
|
|
|
|
func GetLogBuf() *Buffer {
|
|
|
|
|
return LogBuf
|
|
|
|
|
}
|