diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d088c35..db0e553 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,28 +1,26 @@ -name: lint +name: Lint on: push: branches: - main pull_request: -env: - GOPRIVATE: "github.com/speakeasy-api" + +permissions: + contents: read + jobs: - golangci: - name: lint + golangci-lint: + name: golangci-lint runs-on: ubuntu-latest steps: - - uses: actions/setup-go@v3 - with: - go-version: 1.21 + - uses: actions/checkout@v4 - - name: Configure git for private modules - env: - GIT_AUTH_TOKEN: ${{ secrets.BOT_REPO_TOKEN }} - run: git config --global url."https://speakeasybot:${GITHUB_TOKEN}@github.com".insteadOf "https://github.com" + - uses: actions/setup-go@v5 + with: + go-version-file: go.mod - - uses: actions/checkout@v3 - name: golangci-lint - uses: golangci/golangci-lint-action@v3 + uses: golangci/golangci-lint-action@v7 with: - version: v1.57.2 + version: v2.1.6 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 52fa341..545f77c 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -5,27 +5,20 @@ on: branches: - main pull_request: -env: - GOPRIVATE: "github.com/speakeasy-api" -jobs: - tests: - runs-on: ubuntu-latest - - strategy: - fail-fast: true - matrix: - go-version: [1.21.x] - name: Tests - Go ${{ matrix.go-version }} +permissions: + contents: read +jobs: + test: + runs-on: ubuntu-latest + name: Tests steps: - - name: Checkout the code - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - - name: Install Go - uses: actions/setup-go@v2 + - uses: actions/setup-go@v5 with: - go-version: ${{ matrix.go-version }} + go-version-file: go.mod - - name: Run the tests - run: go test ./... \ No newline at end of file + - name: Run tests + run: go test -race -count=1 ./... 
diff --git a/.golangci.yaml b/.golangci.yaml index 199dfe9..dee5401 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -1,8 +1,92 @@ +version: "2" + run: - go: "1.19" -linters-settings: - tagliatelle: - case: - use-field-name: true + go: "1.21" + timeout: 5m + +formatters: + enable: + - gofumpt + + settings: + gofumpt: + extra-rules: true + +linters: + enable: + # Bugs / correctness + - govet + - staticcheck + - errcheck + - ineffassign + - bodyclose + - nilerr + - copyloopvar + + # Style / simplicity + - revive + - misspell + - unconvert + - unused + - nolintlint + - gocritic + - dupword + - usestdlibvars + + # Complexity + - gocyclo + - cyclop + + # Testing + - testifylint + + # Tags + - tagliatelle + + settings: + gocyclo: + min-complexity: 30 + + cyclop: + max-complexity: 30 + + gocritic: + enabled-tags: + - diagnostic + - style + - performance + + revive: rules: - json: snake + - name: blank-imports + - name: context-as-argument + - name: dot-imports + - name: error-naming + - name: error-return + - name: exported + disabled: true + - name: increment-decrement + - name: indent-error-flow + - name: range + - name: receiver-naming + - name: redefines-builtin-id + - name: superfluous-else + - name: unreachable-code + - name: unused-parameter + + tagliatelle: + case: + use-field-name: true + rules: + json: snake + + nolintlint: + require-explanation: true + require-specific: true + + testifylint: + enable-all: true + +issues: + max-issues-per-linter: 0 + max-same-issues: 0 diff --git a/Makefile b/Makefile index 925e94c..3d6c71a 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,10 @@ -.PHONY: * +.PHONY: lint fmt test lint: - gofumpt -l -w . golangci-lint run + +fmt: + gofumpt -l -w . + +test: + go test -race -count=1 ./... 
diff --git a/api_significance.go b/api_significance.go new file mode 100644 index 0000000..ce5878a --- /dev/null +++ b/api_significance.go @@ -0,0 +1,25 @@ +package git_diff_parser + +type ( + ContentChange = contentChange + ContentChangeType = contentChangeType + FileDiff = fileDiff + FileDiffType = fileDiffType +) + +const ( + ContentChangeTypeAdd = contentChangeTypeAdd + ContentChangeTypeDelete = contentChangeTypeDelete + ContentChangeTypeModify = contentChangeTypeModify + ContentChangeTypeNOOP = contentChangeTypeNOOP + + FileDiffTypeAdded = fileDiffTypeAdded + FileDiffTypeDeleted = fileDiffTypeDeleted + FileDiffTypeModified = fileDiffTypeModified +) + +func SignificantChange(diff string, isSignificant func(*FileDiff, *ContentChange) (bool, string)) (significant bool, msg string, err error) { + return significantChange(diff, func(fileDiff *fileDiff, change *contentChange) (bool, string) { + return isSignificant(fileDiff, change) + }) +} diff --git a/apply.go b/apply.go new file mode 100644 index 0000000..78ec235 --- /dev/null +++ b/apply.go @@ -0,0 +1,231 @@ +package git_diff_parser + +import ( + "bytes" + "errors" + "strings" +) + +type patchHunk struct { + header string + oldStart int + oldCount int + newStart int + newCount int + lines []patchLine +} + +type patchLine struct { + kind byte + text string + hasNewline bool + oldEOF bool + newEOF bool +} + +type fileLine struct { + text string + hasNewline bool + eofMarker bool +} + +func ApplyFile(pristine, patchData []byte) ([]byte, error) { + result, err := applyFileWithOptions(pristine, patchData, defaultApplyOptions()) + return result.Content, err +} + +func ApplyFileWithConflicts(pristine, patchData []byte) ([]byte, error) { + result, err := applyFileWithOptions(pristine, patchData, defaultMergeApplyOptions()) + return result.Content, err +} + +func applyFileWithOptions(pristine, patchData []byte, options applyOptions) (applyResult, error) { + return newPatchApply(options).applyFileWithResult(pristine, 
patchData) +} + +func (p *patchApply) applyFile(pristine, patchData []byte) ([]byte, error) { + result, err := p.applyFileWithResult(pristine, patchData) + return result.Content, err +} + +func (p *patchApply) applyFileWithResult(pristine, patchData []byte) (applyResult, error) { + patch, err := p.validateAndParsePatch(patchData) + if err != nil { + return applyResult{}, err + } + return p.applyValidatedPatch(pristine, patch) +} + +func (p *patchApply) applyValidatedPatch(pristine []byte, patch validatedPatch) (applyResult, error) { + outcome, err := p.newApplySession(pristine).apply(patch) + if err != nil { + return applyResult{}, err + } + + result := renderApplyResult(pristine, outcome, p.options) + if len(outcome.conflicts) == 0 { + return result, nil + } + + if p.options.Mode == applyModeMerge { + return result, &applyError{ + MergeConflicts: len(outcome.conflicts), + ConflictingHunks: len(outcome.conflicts), + } + } + + return result, &applyError{DirectMisses: len(outcome.conflicts)} +} + +func validateApplyFileDiff(fileDiff *fileDiff) error { + switch { + case fileDiff.IsBinary: + return errors.New("binary patches are not supported") + case fileDiff.NewMode != "": + return errors.New("file mode changes are not supported") + case fileDiff.Type == fileDiffTypeAdded || fileDiff.Type == fileDiffTypeDeleted: + return errors.New("patches may only modify existing files") + case len(fileDiff.Hunks) == 0: + return errors.New("patch contains no hunks") + case fileDiff.RenameFrom != "" || fileDiff.RenameTo != "" || fileDiff.CopyFrom != "" || fileDiff.CopyTo != "": + return errors.New("unsupported patch syntax: copy and rename headers are not supported") + case !fileDiffHasChanges(fileDiff): + return errors.New("patch contains no effective changes") + default: + return nil + } +} + +func fileDiffHasChanges(fileDiff *fileDiff) bool { + for _, hunk := range fileDiff.Hunks { + for _, change := range hunk.ChangeList { + if change.Type != contentChangeTypeNOOP { + return 
true + } + } + } + return false +} + +func desiredLines(hunk patchHunk) []fileLine { + return desiredLinesWindow(hunk, 0, len(hunk.lines)) +} + +func desiredLinesWindow(hunk patchHunk, start, end int) []fileLine { + lines := make([]fileLine, 0, len(hunk.lines)) + for _, line := range hunk.lines[start:end] { + if line.kind == ' ' || line.kind == '+' { + lines = append(lines, fileLine{text: line.text, hasNewline: line.hasNewline, eofMarker: line.newEOF}) + } + } + return lines +} + +func preimageLinesWindow(hunk patchHunk, start, end int) []fileLine { + lines := make([]fileLine, 0, len(hunk.lines)) + for _, line := range hunk.lines[start:end] { + if line.kind == ' ' || line.kind == '-' { + lines = append(lines, fileLine{text: line.text, hasNewline: line.hasNewline, eofMarker: line.oldEOF}) + } + } + return lines +} + +func matchFragment(source []fileLine, start int, fragment []fileLine, ignoreWhitespace bool) bool { + if len(fragment) == 0 { + return true + } + if start < 0 || start+len(fragment) > len(source) { + return false + } + + for i := range fragment { + if !lineMatches(source[start+i], fragment[i], ignoreWhitespace) { + return false + } + } + + return true +} + +func lineMatches(left, right fileLine, ignoreWhitespace bool) bool { + if left.hasNewline != right.hasNewline || left.eofMarker != right.eofMarker { + return false + } + if left.text == right.text { + return true + } + if !ignoreWhitespace { + return false + } + return normalizeWhitespace(left.text) == normalizeWhitespace(right.text) +} + +func normalizeWhitespace(text string) string { + return strings.Join(strings.Fields(text), " ") +} + +func appendSourceLines(dst []fileLine, src ...fileLine) []fileLine { + return append(dst, src...) 
+} + +func ensureTrailingNewline(lines []fileLine) []fileLine { + if len(lines) == 0 { + return lines + } + lines[len(lines)-1].hasNewline = true + return lines +} + +func splitFileLines(content []byte) []fileLine { + rawLines := splitLinesPreserveNewline(string(content)) + lines := make([]fileLine, 0, len(rawLines)) + for _, raw := range rawLines { + lines = append(lines, fileLine{ + text: trimSingleLineEnding(raw), + hasNewline: strings.HasSuffix(raw, "\n"), + }) + } + if len(content) > 0 && content[len(content)-1] == '\n' { + lines = append(lines, fileLine{text: "", hasNewline: true, eofMarker: true}) + } + return lines +} + +func joinFileLines(lines []fileLine) []byte { + var buf bytes.Buffer + for _, line := range lines { + if line.eofMarker { + continue + } + buf.WriteString(line.text) + if line.hasNewline { + buf.WriteByte('\n') + } + } + return buf.Bytes() +} + +func trimSingleLineEnding(s string) string { + s = strings.TrimSuffix(s, "\n") + return s +} + +func splitLinesPreserveNewline(s string) []string { + if s == "" { + return nil + } + lines := strings.SplitAfter(s, "\n") + if lines[len(lines)-1] == "" { + lines = lines[:len(lines)-1] + } + return lines +} + +func normalizePatchForValidation(patchData []byte) []byte { + trimmed := bytes.TrimSpace(patchData) + if bytes.HasPrefix(trimmed, []byte("diff --git ")) { + return patchData + } + return []byte("diff --git a/__patch__ b/__patch__\n" + string(patchData)) +} diff --git a/apply_internal_test.go b/apply_internal_test.go new file mode 100644 index 0000000..5338790 --- /dev/null +++ b/apply_internal_test.go @@ -0,0 +1,111 @@ +package git_diff_parser + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestFindPosRejectsAlreadyAppliedPostimage(t *testing.T) { + session := &applySession{ + sourceLines: splitFileLines([]byte("a\nb\nx\nc\n")), + } + hunk := patchHunk{ + oldStart: 1, + oldCount: 3, + newCount: 4, + lines: []patchLine{ + {kind: ' 
', text: "a", hasNewline: true}, + {kind: ' ', text: "b", hasNewline: true}, + {kind: '+', text: "x", hasNewline: true}, + {kind: ' ', text: "c", hasNewline: true}, + }, + } + + match, matched := session.findPos(hunk) + assert.Equal(t, matchedHunk{}, match) + assert.False(t, matched) +} + +func TestMatchFragment_IgnoreWhitespace(t *testing.T) { + source := splitFileLines([]byte("alpha\n beta\ncharlie\n")) + fragment := []fileLine{ + {text: "alpha", hasNewline: true}, + {text: "beta", hasNewline: true}, + {text: "charlie", hasNewline: true}, + } + + require.False(t, matchFragment(source, 0, fragment, false)) + require.True(t, matchFragment(source, 0, fragment, true)) +} + +func TestFindPosForFragmentMatchesExactBlock(t *testing.T) { + session := &applySession{ + sourceLines: splitFileLines([]byte("zero\nalpha\nbravo\ncharlie\n")), + } + match, matched := session.findPosForFragment(1, []fileLine{ + {text: "alpha", hasNewline: true}, + {text: "bravo", hasNewline: true}, + {text: "charlie", hasNewline: true}, + }, false, false) + require.True(t, matched) + assert.Equal(t, 1, match) +} + +func TestFindPosWithMinContextReducesLeadingContext(t *testing.T) { + session := &applySession{ + applier: &patchApply{options: applyOptions{MinContext: 1, MinContextSet: true}}, + sourceLines: splitFileLines([]byte("a0\nA1\na2\na3\na4\na5\na6\n")), + patched: make([]bool, 7), + } + hunk := patchHunk{ + oldStart: 2, + oldCount: 5, + newStart: 2, + newCount: 5, + lines: []patchLine{ + {kind: ' ', text: "a1", hasNewline: true}, + {kind: ' ', text: "a2", hasNewline: true}, + {kind: '-', text: "a3", hasNewline: true}, + {kind: '+', text: "A3", hasNewline: true}, + {kind: ' ', text: "a4", hasNewline: true}, + {kind: ' ', text: "a5", hasNewline: true}, + }, + } + + match, matched := session.findPos(hunk) + require.True(t, matched) + assert.Equal(t, 2, match.sourceStart) + assert.Equal(t, 5, match.sourceEnd) + assert.Equal(t, 1, match.hunkStart) + assert.Equal(t, 5, match.hunkEnd) +} + +func 
TestFindPosForFragmentRejectsPatchedRangesWithoutOverlap(t *testing.T) { + session := &applySession{ + sourceLines: splitFileLines([]byte("zero\nalpha\nbravo\ncharlie\n")), + patched: []bool{false, true, true, false}, + } + + _, matched := session.findPosForFragment(1, []fileLine{ + {text: "alpha", hasNewline: true}, + {text: "bravo", hasNewline: true}, + }, false, false) + assert.False(t, matched) +} + +func TestFindPosForFragmentAllowsPatchedRangesWithOverlap(t *testing.T) { + session := &applySession{ + applier: &patchApply{options: applyOptions{AllowOverlap: true}}, + sourceLines: splitFileLines([]byte("zero\nalpha\nbravo\ncharlie\n")), + patched: []bool{false, true, true, false}, + } + + match, matched := session.findPosForFragment(1, []fileLine{ + {text: "alpha", hasNewline: true}, + {text: "bravo", hasNewline: true}, + }, false, false) + require.True(t, matched) + assert.Equal(t, 1, match) +} diff --git a/apply_options.go b/apply_options.go new file mode 100644 index 0000000..ac1a39d --- /dev/null +++ b/apply_options.go @@ -0,0 +1,82 @@ +package git_diff_parser + +import "math" + +// applyMode controls how the apply engine treats hunks that cannot be placed +// directly into the target content. +type applyMode int + +const ( + // applyModeApply keeps the output neutral when a hunk cannot be applied. + applyModeApply applyMode = iota + // applyModeMerge renders conflict markers into the output for misses. + applyModeMerge +) + +// conflictLabels controls the labels rendered into conflict markers. +// The zero value renders neutral markers without any labels. +type conflictLabels struct { + Current string + Incoming string +} + +// applyOptions configures the apply engine. 
+type applyOptions struct { + Mode applyMode + ConflictLabels conflictLabels + IgnoreWhitespace bool + AllowOverlap bool + MinContext int + MinContextSet bool +} + +func defaultApplyOptions() applyOptions { + return applyOptions{ + Mode: applyModeApply, + MinContext: math.MaxInt, + } +} + +func defaultMergeApplyOptions() applyOptions { + options := defaultApplyOptions() + options.Mode = applyModeMerge + options.ConflictLabels = conflictLabels{ + Current: "Current", + Incoming: "Incoming patch", + } + return options +} + +// patchApply holds apply-time configuration and mirrors Git's stateful apply design. +type patchApply struct { + options applyOptions +} + +func newPatchApply(options applyOptions) *patchApply { + return &patchApply{options: normalizeApplyOptions(options)} +} + +func (o applyOptions) normalize() applyOptions { + if o.Mode != applyModeMerge { + o.Mode = applyModeApply + } + if o.Mode == applyModeMerge { + defaults := defaultMergeApplyOptions() + if !o.MinContextSet { + o.MinContext = defaultApplyOptions().MinContext + } + if o.ConflictLabels.Current == "" { + o.ConflictLabels.Current = defaults.ConflictLabels.Current + } + if o.ConflictLabels.Incoming == "" { + o.ConflictLabels.Incoming = defaults.ConflictLabels.Incoming + } + } else if !o.MinContextSet { + o.MinContext = defaultApplyOptions().MinContext + } + return o +} + +func normalizeApplyOptions(options applyOptions) applyOptions { + return options.normalize() +} diff --git a/apply_render.go b/apply_render.go new file mode 100644 index 0000000..cdd2f39 --- /dev/null +++ b/apply_render.go @@ -0,0 +1,88 @@ +package git_diff_parser + +import "bytes" + +func renderApplyResult(pristine []byte, outcome applyOutcome, options applyOptions) applyResult { + result := applyResult{ + Content: joinFileLines(outcome.content), + Reject: renderRejectContent(outcome.rejectHead, outcome.conflicts), + } + + if len(outcome.conflicts) == 0 { + return result + } + + switch options.Mode { + case applyModeMerge: + 
result.Content = renderMergeContent(outcome.content, outcome.conflicts, options.ConflictLabels) + result.MergeConflicts = len(outcome.conflicts) + default: + result.Content = append([]byte{}, pristine...) + result.DirectMisses = len(outcome.conflicts) + } + + return result +} + +func renderMergeContent(base []fileLine, conflicts []applyConflict, labels conflictLabels) []byte { + if len(conflicts) == 0 { + return joinFileLines(base) + } + + rendered := append([]fileLine(nil), base...) + for i := len(conflicts) - 1; i >= 0; i-- { + conflict := conflicts[i] + if conflict.offset < 0 || conflict.offset > len(rendered) { + continue + } + + end := conflict.offset + len(conflict.ours) + if end > len(rendered) { + end = len(rendered) + } + + replacement := renderConflictLines(labels, conflict.ours, conflict.theirs) + rendered = append(rendered[:conflict.offset], append(replacement, rendered[end:]...)...) + } + + return joinFileLines(rendered) +} + +func renderRejectContent(header string, conflicts []applyConflict) []byte { + if len(conflicts) == 0 { + return nil + } + + var buf bytes.Buffer + if header != "" { + buf.WriteString(header) + buf.WriteByte('\n') + } + for i := range conflicts { + if conflicts[i].hunk.header != "" { + buf.WriteString(conflicts[i].hunk.header) + buf.WriteByte('\n') + } + for _, line := range conflicts[i].hunk.lines { + buf.WriteByte(line.kind) + buf.WriteString(line.text) + if line.hasNewline { + buf.WriteByte('\n') + } + } + } + return buf.Bytes() +} + +func renderConflictLines(labels conflictLabels, ours, theirs []fileLine) []fileLine { + lines := []fileLine{ + {text: "<<<<<<< " + labels.Current, hasNewline: true}, + } + lines = appendSourceLines(lines, ours...) + lines = ensureTrailingNewline(lines) + lines = append(lines, fileLine{text: "=======", hasNewline: true}) + lines = appendSourceLines(lines, theirs...) 
+ lines = ensureTrailingNewline(lines) + lines = append(lines, fileLine{text: ">>>>>>> " + labels.Incoming, hasNewline: true}) + return lines +} diff --git a/apply_result.go b/apply_result.go new file mode 100644 index 0000000..74549a6 --- /dev/null +++ b/apply_result.go @@ -0,0 +1,69 @@ +package git_diff_parser + +import ( + "errors" + "fmt" +) + +var ErrPatchConflict = errors.New("patch conflict") + +// ApplyResult captures the patched content and the type of misses encountered +// while attempting to apply it. +type applyResult struct { + Content []byte + Reject []byte + DirectMisses int + MergeConflicts int +} + +type applyOutcome struct { + content []fileLine + conflicts []applyConflict + rejectHead string +} + +type applyConflict struct { + offset int + hunk patchHunk + ours []fileLine + theirs []fileLine +} + +// applyError reports the aggregate apply outcome. +type applyError struct { + DirectMisses int + MergeConflicts int + // ConflictingHunks keeps the legacy count available for callers that still + // reason about conflict hunks rather than the new miss/conflict split. 
+ ConflictingHunks int +} + +func (e *applyError) Error() string { + if e == nil { + return "" + } + + if e.MergeConflicts > 0 || e.ConflictingHunks > 0 { + count := e.MergeConflicts + if count == 0 { + count = e.ConflictingHunks + } + if count == 1 { + return "patch conflict in 1 hunk" + } + return fmt.Sprintf("patch conflict in %d hunks", count) + } + + if e.DirectMisses > 0 { + if e.DirectMisses == 1 { + return "patch miss in 1 hunk" + } + return fmt.Sprintf("patch miss in %d hunks", e.DirectMisses) + } + + return "patch apply failed" +} + +func (e *applyError) Is(target error) bool { + return target == ErrPatchConflict +} diff --git a/apply_session.go b/apply_session.go new file mode 100644 index 0000000..4af7dd4 --- /dev/null +++ b/apply_session.go @@ -0,0 +1,367 @@ +package git_diff_parser + +import "fmt" + +type validatedPatch struct { + rejectHead string + hunks []patchHunk +} + +type applySession struct { + applier *patchApply + sourceLines []fileLine + patched []bool + image []fileLine + cursor int + conflicts []applyConflict + rejectHead string +} + +type matchedHunk struct { + sourceStart int + sourceEnd int + hunkStart int + hunkEnd int +} + +func (p *patchApply) validateAndParsePatch(patchData []byte) (validatedPatch, error) { + normalizedPatch := normalizePatchForValidation(patchData) + parsed, errs := parse(string(normalizedPatch)) + if len(errs) > 0 { + return validatedPatch{}, fmt.Errorf("unsupported patch syntax: %w", errs[0]) + } + if len(parsed.FileDiff) != 1 { + return validatedPatch{}, fmt.Errorf("expected exactly 1 file diff, found %d", len(parsed.FileDiff)) + } + + fileDiff := parsed.FileDiff[0] + if err := validateApplyFileDiff(&fileDiff); err != nil { + return validatedPatch{}, err + } + + hunks := make([]patchHunk, 0, len(fileDiff.Hunks)) + for i := range fileDiff.Hunks { + hunks = append(hunks, patchHunkFromHunk(&fileDiff.Hunks[i])) + } + + return validatedPatch{ + rejectHead: formatRejectHeader(&fileDiff), + hunks: hunks, + }, nil +} + 
+func (p *patchApply) newApplySession(pristine []byte) *applySession { + sourceLines := splitFileLines(pristine) + return &applySession{ + applier: p, + sourceLines: sourceLines, + patched: make([]bool, len(sourceLines)), + image: make([]fileLine, 0, len(sourceLines)), + } +} + +func (s *applySession) apply(patch validatedPatch) (applyOutcome, error) { + s.rejectHead = patch.rejectHead + + for _, hunk := range patch.hunks { + match, matched := s.findPos(hunk) + if !matched { + s.appendConflictingHunk(hunk) + continue + } + + s.applyHunk(hunk, match) + } + + s.appendSourceUntil(len(s.sourceLines)) + return applyOutcome{ + content: append([]fileLine(nil), s.image...), + conflicts: append([]applyConflict(nil), s.conflicts...), + rejectHead: s.rejectHead, + }, nil +} + +func (s *applySession) applyHunk(hunk patchHunk, match matchedHunk) { + s.appendSourceUntil(match.sourceStart) + + for _, hunkLine := range hunk.lines[match.hunkStart:match.hunkEnd] { + switch hunkLine.kind { + case ' ': + s.image = append(s.image, fileLine{text: hunkLine.text, hasNewline: hunkLine.hasNewline, eofMarker: hunkLine.newEOF}) + s.cursor++ + case '-': + s.cursor++ + case '+': + s.image = append(s.image, fileLine{text: hunkLine.text, hasNewline: hunkLine.hasNewline, eofMarker: hunkLine.newEOF}) + } + } + + if !s.allowOverlap() { + for i := match.sourceStart; i < match.sourceEnd && i < len(s.patched); i++ { + s.patched[i] = true + } + } +} + +func (s *applySession) appendConflictingHunk(hunk patchHunk) { + conflictStart := hunk.oldStart - 1 + if conflictStart < s.cursor { + conflictStart = s.cursor + } + if conflictStart > len(s.sourceLines) { + conflictStart = len(s.sourceLines) + } + + conflictEnd := conflictStart + hunk.oldCount + if conflictEnd > len(s.sourceLines) { + conflictEnd = len(s.sourceLines) + } + + s.appendSourceUntil(conflictStart) + offset := len(s.image) + ours := append([]fileLine(nil), s.sourceLines[conflictStart:conflictEnd]...) 
+ theirs := desiredLines(hunk) + s.image = appendSourceLines(s.image, ours...) + s.conflicts = append(s.conflicts, applyConflict{ + offset: offset, + hunk: hunk, + ours: ours, + theirs: theirs, + }) + s.cursor = conflictEnd +} + +func (s *applySession) appendSourceUntil(limit int) { + if limit <= s.cursor { + return + } + s.image = appendSourceLines(s.image, s.sourceLines[s.cursor:limit]...) + s.cursor = limit +} + +func (s *applySession) findPos(hunk patchHunk) (matchedHunk, bool) { + preferred := hunk.oldStart - 1 + if hunk.oldCount == 0 { + preferred = hunk.oldStart + } + if preferred < s.cursor { + preferred = s.cursor + } + + postimage := desiredLines(hunk) + if hunk.newCount >= hunk.oldCount && preferred <= len(s.sourceLines) && matchFragment(s.sourceLines, preferred, postimage, s.ignoreWhitespace()) { + return matchedHunk{}, false + } + + matchBeginning := hunk.oldStart == 0 || hunk.oldStart == 1 + leading, trailing := hunkContext(hunk.lines) + matchEnd := trailing == 0 + + hunkStart := 0 + hunkEnd := len(hunk.lines) + + for { + preimage := preimageLinesWindow(hunk, hunkStart, hunkEnd) + if pos, ok := s.findPosForFragment(preferred, preimage, matchBeginning, matchEnd); ok { + return matchedHunk{ + sourceStart: pos, + sourceEnd: pos + len(preimage), + hunkStart: hunkStart, + hunkEnd: hunkEnd, + }, true + } + + if leading <= s.minContext() && trailing <= s.minContext() { + break + } + if matchBeginning || matchEnd { + matchBeginning = false + matchEnd = false + continue + } + if leading >= trailing && hunkStart < hunkEnd { + hunkStart++ + preferred-- + if preferred < s.cursor { + preferred = s.cursor + } + leading-- + } + if trailing > leading && hunkStart < hunkEnd { + hunkEnd-- + trailing-- + } + } + + return matchedHunk{}, false +} + +func (s *applySession) findPosForFragment(preferred int, fragment []fileLine, matchBeginning, matchEnd bool) (int, bool) { + maxStart := s.fragmentEndLimit(fragment) - len(fragment) + if maxStart < 0 { + maxStart = 
s.fragmentEndLimit(fragment) + } + if matchBeginning { + preferred = 0 + } else if matchEnd { + preferred = maxStart + } + if preferred > maxStart { + preferred = maxStart + } + if preferred < s.cursor { + preferred = s.cursor + } + + for offset := 0; ; offset++ { + left := preferred - offset + if left >= s.cursor && s.matchFragmentAt(left, fragment, matchBeginning, matchEnd) { + return left, true + } + + right := preferred + offset + if offset > 0 && right >= s.cursor && s.matchFragmentAt(right, fragment, matchBeginning, matchEnd) { + return right, true + } + + if left < s.cursor && right > maxStart { + break + } + } + + return 0, false +} + +func (s *applySession) matchFragmentAt(start int, fragment []fileLine, matchBeginning, matchEnd bool) bool { + if matchBeginning && start != 0 { + return false + } + if start < 0 { + return false + } + if len(fragment) == 0 { + if matchEnd { + return start == s.sourceContentLines() + } + return start <= s.sourceContentLines() + } + if start+len(fragment) > len(s.sourceLines) { + return false + } + if matchEnd && start+len(fragment) != s.fragmentEndLimit(fragment) { + return false + } + if !s.allowOverlap() { + for i := start; i < start+len(fragment); i++ { + if i < len(s.patched) && s.patched[i] { + return false + } + } + } + return matchFragment(s.sourceLines, start, fragment, s.ignoreWhitespace()) +} + +func patchHunkFromHunk(hunk *hunk) patchHunk { + lines := make([]patchLine, 0, len(hunk.Lines)) + for _, line := range hunk.Lines { + lines = append(lines, patchLine{ + kind: line.Kind, + text: line.Text, + hasNewline: line.HasNewline, + oldEOF: line.OldEOF, + newEOF: line.NewEOF, + }) + } + + return patchHunk{ + header: formatPatchHunkHeader(hunk), + oldStart: hunk.StartLineNumberOld, + oldCount: hunk.CountOld, + newStart: hunk.StartLineNumberNew, + newCount: hunk.CountNew, + lines: lines, + } +} + +func formatRejectHeader(fileDiff *fileDiff) string { + path := firstNonEmpty(fileDiff.ToFile, fileDiff.FromFile) + if path == 
"" { + return "" + } + return "diff a/" + path + " b/" + path + "\t(rejected hunks)" +} + +func formatPatchHunkHeader(hunk *hunk) string { + oldRange := formatPatchHunkRange(hunk.StartLineNumberOld, hunk.CountOld) + newRange := formatPatchHunkRange(hunk.StartLineNumberNew, hunk.CountNew) + return fmt.Sprintf("@@ -%s +%s @@", oldRange, newRange) +} + +func formatPatchHunkRange(start, count int) string { + if count == 1 { + return fmt.Sprintf("%d", start) + } + return fmt.Sprintf("%d,%d", start, count) +} + +func (s *applySession) ignoreWhitespace() bool { + return s.applier != nil && s.applier.options.IgnoreWhitespace +} + +func (s *applySession) allowOverlap() bool { + return s.applier != nil && s.applier.options.AllowOverlap +} + +func (s *applySession) minContext() int { + if s.applier == nil { + return 0 + } + return s.applier.options.MinContext +} + +func (s *applySession) sourceContentLines() int { + if n := len(s.sourceLines); n > 0 && s.sourceLines[n-1].eofMarker { + return n - 1 + } + return len(s.sourceLines) +} + +func (s *applySession) fragmentEndLimit(fragment []fileLine) int { + if len(fragment) > 0 && fragment[len(fragment)-1].eofMarker { + return len(s.sourceLines) + } + return s.sourceContentLines() +} + +func hunkContext(lines []patchLine) (leading, trailing int) { + firstChange := len(lines) + lastChange := -1 + for i, line := range lines { + if line.kind == '+' || line.kind == '-' { + if firstChange == len(lines) { + firstChange = i + } + lastChange = i + } + } + + if lastChange < 0 { + return len(lines), len(lines) + } + + leading = 0 + for i := 0; i < firstChange; i++ { + if lines[i].kind == ' ' { + leading++ + } + } + + trailing = 0 + for i := len(lines) - 1; i > lastChange; i-- { + if lines[i].kind == ' ' { + trailing++ + } + } + + return leading, trailing +} diff --git a/apply_test.go b/apply_test.go new file mode 100644 index 0000000..6d3b538 --- /dev/null +++ b/apply_test.go @@ -0,0 +1,1021 @@ +package git_diff_parser + +import ( + "bytes" 
+ "errors" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/pmezard/go-difflib/difflib" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type applyFixtureFiles struct { + src []byte + patch []byte + out []byte +} + +const ( + defaultCurrentConflictMarker = "<<<<<<< Current" + defaultIncomingConflictMarker = ">>>>>>> Incoming patch" +) + +func TestApplyFile_TextFixtures(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + fixture string + wantErr string + conflict bool + }{ + {name: "new file", fixture: "text_fragment_new"}, + {name: "add start", fixture: "text_fragment_add_start"}, + {name: "add middle", fixture: "text_fragment_add_middle"}, + {name: "add end", fixture: "text_fragment_add_end"}, + {name: "add end no eof", fixture: "text_fragment_add_end_noeol"}, + {name: "change start", fixture: "text_fragment_change_start"}, + {name: "change middle", fixture: "text_fragment_change_middle"}, + {name: "change end", fixture: "text_fragment_change_end"}, + {name: "change end eol", fixture: "text_fragment_change_end_eol"}, + {name: "change exact", fixture: "text_fragment_change_exact"}, + {name: "change single no eof", fixture: "text_fragment_change_single_noeol"}, + {name: "delete all", fixture: "text_fragment_delete_all"}, + {name: "short src before", fixture: "text_fragment_error_short_src_before", wantErr: "patch conflict", conflict: true}, + {name: "short src", fixture: "text_fragment_error_short_src", wantErr: "patch conflict", conflict: true}, + {name: "context conflict", fixture: "text_fragment_error_context_conflict", wantErr: "patch conflict", conflict: true}, + {name: "delete conflict", fixture: "text_fragment_error_delete_conflict", wantErr: "patch conflict", conflict: true}, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + files := loadApplyFixture(t, test.fixture) + applyFn := ApplyFile + if test.conflict { + applyFn = 
ApplyFileWithConflicts + } + applied, err := applyFn(files.src, files.patch) + + if test.wantErr != "" { + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) + if test.conflict { + var applyErr *applyError + require.ErrorAs(t, err, &applyErr) + require.ErrorIs(t, err, ErrPatchConflict) + assert.Contains(t, string(applied), defaultCurrentConflictMarker) + assert.Contains(t, string(applied), defaultIncomingConflictMarker) + } + return + } + + require.NoError(t, err) + assert.True(t, bytes.Equal(expectedApplyFixtureOutput(t, files), applied)) + }) + } +} + +func TestApplyFile_RejectsUnsupportedFixtures(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + patch []byte + wantErr string + }{ + { + name: "rename patch", + patch: []byte(`diff --git a/sdk.go b/custom/sdk.go +similarity index 100% +rename from sdk.go +rename to custom/sdk.go +`), + wantErr: "patch contains no hunks", + }, + { + name: "mode only patch", + patch: []byte(`diff --git a/sdk.go b/sdk.go +old mode 100644 +new mode 100755 +`), + wantErr: "file mode changes are not supported", + }, + { + name: "binary patch", + patch: []byte(`diff --git a/sdk.go b/sdk.go +GIT binary patch +literal 3 +abc +`), + wantErr: "binary patches are not supported", + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + _, err := ApplyFile([]byte("package testsdk\n"), test.patch) + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) + }) + } +} + +func TestApplyFile_RejectsHeaderOnlyAndNoOpPatches(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + patch []byte + wantErr string + }{ + { + name: "header only", + patch: []byte(`diff --git a/sdk.go b/sdk.go +--- a/sdk.go ++++ b/sdk.go +`), + wantErr: "patch contains no hunks", + }, + { + name: "no op hunk", + patch: []byte(`diff --git a/sdk.go b/sdk.go +--- a/sdk.go ++++ b/sdk.go +@@ -1,1 +1,1 @@ + package testsdk +`), + wantErr: "patch contains no 
effective changes", + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + _, err := ApplyFile([]byte("package testsdk\n"), test.patch) + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) + }) + } +} + +func TestApplyFile_NoNewlineMatrix(t *testing.T) { + t.Parallel() + + files := []struct { + name string + content []byte + }{ + {name: "0", content: []byte("a\nb\n")}, + {name: "1", content: []byte("a\nb\nc\n")}, + {name: "2", content: []byte("a\nb")}, + {name: "3", content: []byte("a\nc\nb")}, + } + + for i := range files { + for j := range files { + if i == j { + continue + } + + from := files[i] + to := files[j] + name := from.name + " to " + to.name + + t.Run(name, func(t *testing.T) { + t.Parallel() + + patch := mustReadFile(t, filepath.Join("testdata", "apply", "t4101", "diff."+from.name+"-"+to.name)) + applied, err := ApplyFile(from.content, patch) + require.NoError(t, err) + assert.Equal(t, to.content, applied) + }) + } + } +} + +func TestApplyFile_BoundaryCases(t *testing.T) { + t.Parallel() + + original := []byte("b\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n") + tests := []struct { + name string + want []byte + requiresUnidiff0 bool + }{ + {name: "add head", want: []byte("a\nb\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n"), requiresUnidiff0: true}, + {name: "insert second", want: []byte("b\na\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n"), requiresUnidiff0: true}, + {name: "modify head", want: []byte("a\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n"), requiresUnidiff0: true}, + {name: "delete head", want: []byte("c\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n"), requiresUnidiff0: true}, + {name: "add tail", want: []byte("b\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\nz\n")}, + {name: "modify tail", want: 
[]byte("b\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\nz\n")}, + {name: "delete tail", want: []byte("b\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\n")}, + } + + for _, test := range tests { + test := test + t.Run(test.name+" context "+contextLabel(3), func(t *testing.T) { + t.Parallel() + + patch := buildPatchWithContext(t, "victim", original, test.want, 3) + applied, err := ApplyFile(original, patch) + require.NoError(t, err) + assert.Equal(t, test.want, applied) + }) + } +} + +func TestApplyFile_ZeroContextBoundaryCases(t *testing.T) { + t.Parallel() + + original := []byte("b\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n") + tests := []struct { + name string + want []byte + requiresUnidiff0 bool + }{ + {name: "add head", want: []byte("a\nb\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n"), requiresUnidiff0: true}, + {name: "insert second", want: []byte("b\na\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n"), requiresUnidiff0: true}, + {name: "modify head", want: []byte("a\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n"), requiresUnidiff0: true}, + {name: "delete head", want: []byte("c\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\n"), requiresUnidiff0: true}, + {name: "add tail", want: []byte("b\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\ny\nz\n")}, + {name: "modify tail", want: []byte("b\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\nz\n")}, + {name: "delete tail", want: []byte("b\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\nn\no\np\nq\nr\ns\nt\nu\nv\nw\nx\n")}, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + patch := buildPatchWithContext(t, "victim", original, test.want, 0) + applied, err := ApplyFile(original, patch) + if test.requiresUnidiff0 { + require.Error(t, err) + return + } + + require.NoError(t, err) 
+ assert.Equal(t, test.want, applied) + }) + } +} + +func TestApplyFile_OffsetPatches(t *testing.T) { + t.Parallel() + + original := []byte("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n") + target := []byte("1\n2\n3\n4\n5\n6\n7\na\nb\nc\nd\ne\n8\n9\n10\n11\n12\n") + basePatch := buildPatchWithContext(t, "file", original, target, 3) + + tests := []struct { + name string + header string + }{ + {name: "unmodified patch", header: "@@ -5,6 +5,11 @@"}, + {name: "minus offset", header: "@@ -2,6 +2,11 @@"}, + {name: "plus offset", header: "@@ -7,6 +7,11 @@"}, + {name: "big offset", header: "@@ -19,6 +19,11 @@"}, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + patch := rewriteFirstHunkHeader(basePatch, test.header) + applied, err := ApplyFile(original, patch) + require.NoError(t, err) + assert.Equal(t, target, applied) + }) + } +} + +func TestApplyFile_DamagedContextPatchesConflictWithoutFuzz(t *testing.T) { + t.Parallel() + + original := []byte("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n") + target := []byte("1\n2\n3\n4\n5\n6\n7\na\nb\nc\nd\ne\n8\n9\n10\n11\n12\n") + basePatch := buildPatchWithContext(t, "file", original, target, 3) + damaged := bytes.Replace(basePatch, []byte("\n 5\n"), []byte("\n S\n"), 1) + + tests := []struct { + name string + header string + }{ + {name: "no offset", header: "@@ -5,6 +5,11 @@"}, + {name: "minus offset", header: "@@ -2,6 +2,11 @@"}, + {name: "plus offset", header: "@@ -7,6 +7,11 @@"}, + {name: "big offset", header: "@@ -19,6 +19,11 @@"}, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + patch := rewriteFirstHunkHeader(damaged, test.header) + applied, err := ApplyFileWithConflicts(original, patch) + require.Error(t, err) + require.ErrorIs(t, err, ErrPatchConflict) + assert.Contains(t, string(applied), defaultCurrentConflictMarker) + }) + } +} + +func TestApplyFile_EmptyContextPatches(t *testing.T) { + t.Parallel() + + tests 
:= []struct { + name string + original []byte + target []byte + }{ + { + name: "delete blank-lined middle line", + original: []byte("\n\nA\nB\nC\n\n"), + target: []byte("\n\nA\nC\n\n"), + }, + { + name: "insert middle", + original: []byte("alpha\ncharlie\n"), + target: []byte("alpha\nbravo\ncharlie\n"), + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + patch := buildPatchWithContext(t, "file", test.original, test.target, 0) + _, err := ApplyFile(test.original, patch) + require.Error(t, err) + }) + } +} + +func TestApplyFile_EmptyContextNoTrailingNewlinePatches(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + original []byte + target []byte + patch []byte + }{ + { + name: "append no newline tail", + original: []byte("\n\nA\nC\n\n"), + target: []byte("\n\nA\nC\n\nQ"), + patch: []byte(`diff --git a/file b/file +--- a/file ++++ b/file +@@ -6,0 +7 @@ ++Q +\ No newline at end of file +`), + }, + { + name: "modify tail no newline", + original: []byte("alpha\nbravo"), + target: []byte("alpha\ncharlie"), + patch: []byte(`diff --git a/file b/file +--- a/file ++++ b/file +@@ -2 +2 @@ +-bravo +\ No newline at end of file ++charlie +\ No newline at end of file +`), + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + applied, err := applyFileWithOptions(test.original, test.patch, applyOptions{}) + require.NoError(t, err) + assert.Equal(t, test.target, applied.Content) + }) + } +} + +func TestApplyFileWithOptions_ReducesContextToRelocateHunk(t *testing.T) { + t.Parallel() + + patchData := mustReadFile(t, filepath.Join("testdata", "parity", "context-reduced-leading", "patch")) + shiftedPristine := mustReadFile(t, filepath.Join("testdata", "parity", "context-reduced-leading", "src")) + want := mustReadFile(t, filepath.Join("testdata", "parity", "context-reduced-leading", "out")) + + _, err := applyFileWithOptions(shiftedPristine, 
patchData, applyOptions{}) + require.Error(t, err) + + applied, err := applyFileWithOptions(shiftedPristine, patchData, applyOptions{ + MinContext: 1, + MinContextSet: true, + }) + require.NoError(t, err) + assert.Equal(t, want, applied.Content) +} + +func TestApplyFile_RelocatesToNearestMatchingBlock(t *testing.T) { + t.Parallel() + + original := []byte("header\nanchor\ncommon\nvalue-old\nend\ngap\nanchor\ncommon\nvalue-old\nend\n") + target := []byte("header\nanchor\ncommon\nvalue-old\nend\ngap\nanchor\ncommon\nvalue-new\nend\n") + shifted := []byte("header\nanchor\ncommon\nvalue-old\nend\ngap\nextra\nanchor\ncommon\nvalue-old\nend\n") + + patch := buildPatchWithContext(t, "dup.txt", original, target, 1) + applied, err := ApplyFile(shifted, patch) + require.NoError(t, err) + assert.Equal(t, []byte("header\nanchor\ncommon\nvalue-old\nend\ngap\nextra\nanchor\ncommon\nvalue-new\nend\n"), applied) +} + +func TestApplyFile_MultipleHunks(t *testing.T) { + t.Parallel() + + original := []byte("line 1\nline 2\nline 3\nline 4\nline 5\nline 6\nline 7\nline 8\n") + target := []byte("line 1\nline two\nline 3\nline 4\nline 5\nline six\nline 7\nline 8\n") + + patch := buildPatchWithContext(t, "multi.txt", original, target, 1) + applied, err := ApplyFile(original, patch) + require.NoError(t, err) + assert.Equal(t, target, applied) +} + +func TestApplyFile_MultipleHunksOneConflict(t *testing.T) { + t.Parallel() + + original := []byte("line 1\nline 2\nline 3\nline 4\nline 5\nline 6\nline 7\nline 8\n") + target := []byte("line 1\nline two\nline 3\nline 4\nline 5\nline six\nline 7\nline 8\n") + current := []byte("line 1\nline 2\nline 3\nline 4\nline 5\nline VI\nline 7\nline 8\n") + + patch := buildPatchWithContext(t, "multi.txt", original, target, 1) + applied, err := ApplyFileWithConflicts(current, patch) + require.Error(t, err) + require.ErrorIs(t, err, ErrPatchConflict) + assert.Contains(t, string(applied), "line two") + assert.Contains(t, string(applied), 
defaultCurrentConflictMarker) + assert.Contains(t, string(applied), "line VI") + assert.Contains(t, string(applied), "line six") +} + +func TestApplyFile_DefaultsToDirectApply(t *testing.T) { + t.Parallel() + + base := []byte("package testsdk\n\ntype Status struct{}\n") + current := []byte("package testsdk\n\ntype Status struct {\n\tValue string\n}\n") + patchData := buildPatch(t, "status.go", base, []byte("package testsdk\n\ntype Status struct{}\n\nfunc (s *Status) String() string {\n\treturn \"custom\"\n}\n")) + + applied, err := ApplyFile(current, patchData) + require.Error(t, err) + require.ErrorIs(t, err, ErrPatchConflict) + assert.Equal(t, current, applied) + assert.NotContains(t, string(applied), defaultCurrentConflictMarker) + assert.NotContains(t, string(applied), defaultIncomingConflictMarker) +} + +func TestApplyFileWithConflicts_ReturnsConflictMarkers(t *testing.T) { + t.Parallel() + + base := []byte("package testsdk\n\ntype Status struct{}\n") + current := []byte("package testsdk\n\ntype Status struct {\n\tValue string\n}\n") + patchData := buildPatch(t, "status.go", base, []byte("package testsdk\n\ntype Status struct{}\n\nfunc (s *Status) String() string {\n\treturn \"custom\"\n}\n")) + + applied, err := ApplyFileWithConflicts(current, patchData) + require.Error(t, err) + require.ErrorIs(t, err, ErrPatchConflict) + assert.Contains(t, string(applied), defaultCurrentConflictMarker) + assert.Contains(t, string(applied), defaultIncomingConflictMarker) + assert.Contains(t, string(applied), "func (s *Status) String() string") +} + +func TestApplyFileWithOptions_RendersNeutralConflictMarkers(t *testing.T) { + t.Parallel() + + base := []byte("package testsdk\n\ntype Status struct{}\n") + current := []byte("package testsdk\n\ntype Status struct {\n\tValue string\n}\n") + patchData := buildPatch(t, "status.go", base, []byte("package testsdk\n\ntype Status struct{}\n\nfunc (s *Status) String() string {\n\treturn \"custom\"\n}\n")) + + result, err := 
applyFileWithOptions(current, patchData, applyOptions{ + Mode: applyModeMerge, + }) + require.Error(t, err) + var applyErr *applyError + require.ErrorAs(t, err, &applyErr) + assert.Equal(t, 0, result.DirectMisses) + assert.Equal(t, 1, result.MergeConflicts) + assert.Contains(t, string(result.Content), "<<<<<<<") + assert.NotContains(t, string(result.Content), "Current (Your changes)") + assert.NotContains(t, string(result.Content), "Generated by Speakeasy") + assert.NotEmpty(t, result.Reject) + assert.True(t, strings.HasPrefix(string(result.Reject), "diff a/status.go b/status.go\t(rejected hunks)\n@@")) +} + +func TestApplyFileWithOptions_DirectModeReportsMissesWithoutMarkers(t *testing.T) { + t.Parallel() + + base := []byte("package testsdk\n\ntype Status struct{}\n") + current := []byte("package testsdk\n\ntype Status struct {\n\tValue string\n}\n") + patchData := buildPatch(t, "status.go", base, []byte("package testsdk\n\ntype Status struct{}\n\nfunc (s *Status) String() string {\n\treturn \"custom\"\n}\n")) + + result, err := applyFileWithOptions(current, patchData, applyOptions{ + Mode: applyModeApply, + }) + require.Error(t, err) + var applyErr *applyError + require.ErrorAs(t, err, &applyErr) + assert.Equal(t, 1, result.DirectMisses) + assert.Equal(t, 0, result.MergeConflicts) + assert.Equal(t, current, result.Content) + assert.NotContains(t, string(result.Content), "<<<<<<<") + assert.NotContains(t, string(result.Content), ">>>>>>>") + assert.NotEmpty(t, result.Reject) + assert.True(t, strings.HasPrefix(string(result.Reject), "diff a/status.go b/status.go\t(rejected hunks)\n@@")) +} + +func TestPatchApply_AllowsCustomConflictLabels(t *testing.T) { + t.Parallel() + + base := []byte("package testsdk\n\ntype Status struct{}\n") + current := []byte("package testsdk\n\ntype Status struct {\n\tValue string\n}\n") + patchData := buildPatch(t, "status.go", base, []byte("package testsdk\n\ntype Status struct{}\n\nfunc (s *Status) String() string {\n\treturn 
\"custom\"\n}\n")) + + applier := newPatchApply(applyOptions{ + Mode: applyModeMerge, + ConflictLabels: conflictLabels{ + Current: "Current (Your changes)", + Incoming: "New (Generated by Speakeasy)", + }, + }) + + applied, err := applier.applyFile(current, patchData) + require.Error(t, err) + assert.Contains(t, string(applied), "<<<<<<< Current (Your changes)") + assert.Contains(t, string(applied), ">>>>>>> New (Generated by Speakeasy)") +} + +func TestApplyFileWithOptions_IgnoreWhitespaceAppliesThroughContextDrift(t *testing.T) { + t.Parallel() + + original := []byte("alpha\n beta\ncharlie\n") + target := []byte("alpha\n BETA\ncharlie\n") + patchData := buildPatchWithContext(t, "whitespace.txt", original, target, 1) + current := []byte("alpha\n beta\ncharlie\n") + + _, err := applyFileWithOptions(current, patchData, applyOptions{ + Mode: applyModeMerge, + }) + require.Error(t, err) + + applied, err := applyFileWithOptions(current, patchData, applyOptions{ + Mode: applyModeMerge, + IgnoreWhitespace: true, + }) + require.NoError(t, err) + assert.Equal(t, target, applied.Content) + assert.Equal(t, 0, applied.DirectMisses) + assert.Equal(t, 0, applied.MergeConflicts) +} + +func TestApplyFile_RejectsAlreadyAppliedBeginningAndEndingPatches(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + current []byte + patch []byte + }{ + { + name: "ending patch", + current: []byte("a\nb\nc\n"), + patch: []byte(`diff --git a/file b/file +--- a/file ++++ b/file +@@ -1,2 +1,3 @@ + a + b ++c +`), + }, + { + name: "beginning patch", + current: []byte("a\nb\nc\n"), + patch: []byte(`diff --git a/file b/file +--- a/file ++++ b/file +@@ -1,2 +1,3 @@ ++a + b + c +`), + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + applied, err := ApplyFileWithConflicts(test.current, test.patch) + require.Error(t, err) + require.ErrorIs(t, err, ErrPatchConflict) + assert.Contains(t, string(applied), 
defaultCurrentConflictMarker) + }) + } +} + +func TestApplyFile_RejectsAlreadyAppliedMiddlePatches(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + current []byte + patch []byte + }{ + { + name: "middle insertion", + current: []byte("start\nmiddle\ninserted\nend\n"), + patch: []byte(`diff --git a/file b/file +--- a/file ++++ b/file +@@ -1,3 +1,4 @@ + start + middle ++inserted + end +`), + }, + { + name: "replacement already applied", + current: []byte("start\nnew value\nend\n"), + patch: []byte(`diff --git a/file b/file +--- a/file ++++ b/file +@@ -1,3 +1,3 @@ + start +-old value ++new value + end +`), + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + applied, err := ApplyFileWithConflicts(test.current, test.patch) + require.Error(t, err) + require.ErrorIs(t, err, ErrPatchConflict) + assert.Contains(t, string(applied), defaultCurrentConflictMarker) + }) + } +} + +func TestApplyFile_RejectsMultiFileDiff(t *testing.T) { + t.Parallel() + + patchData := []byte(`diff --git a/sdk.go b/sdk.go +--- a/sdk.go ++++ b/sdk.go +@@ -1,3 +1,4 @@ + package testsdk + ++// sdk custom + type SDK struct{} +diff --git a/models/components/pet.go b/models/components/pet.go +--- a/models/components/pet.go ++++ b/models/components/pet.go +@@ -1,3 +1,4 @@ + package components + ++// pet custom + type Pet struct{} +`) + + _, err := ApplyFile([]byte("package testsdk\n\ntype SDK struct{}\n"), patchData) + require.Error(t, err) + assert.Contains(t, err.Error(), "expected exactly 1 file diff") +} + +func TestApplyFile_RejectsMalformedPatches(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + patch []byte + wantErr string + }{ + { + name: "non patch input", + patch: []byte(`I am not a patch +I look nothing like a patch +git apply must fail +`), + wantErr: "unsupported patch syntax", + }, + { + name: "invalid hunk header", + patch: []byte(`diff --git a/file b/file +--- a/file ++++ b/file +@@ 
-x,1 +1,1 @@ +-a ++b +`), + wantErr: "unsupported patch syntax", + }, + { + name: "unexpected hunk line prefix", + patch: []byte(`diff --git a/file b/file +--- a/file ++++ b/file +@@ -1,1 +1,1 @@ +!a +`), + wantErr: "unexpected hunk line", + }, + { + name: "no newline marker without preceding line", + patch: []byte(`diff --git a/file b/file +--- a/file ++++ b/file +@@ -1,1 +1,1 @@ +\ No newline at end of file +`), + wantErr: "unexpected no-newline marker without a preceding patch line", + }, + { + name: "unsupported header garbage", + patch: []byte(`diff --git a/file b/file +copy from file +copy to file-copy +--- a/file ++++ b/file-copy +@@ -1,1 +1,1 @@ +-a ++b +`), + wantErr: "unsupported patch syntax", + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + _, err := ApplyFile([]byte("a\n"), test.patch) + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) + }) + } +} + +func TestApplyFile_RejectsAdditionalUnsupportedPatches(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + patch []byte + wantErr string + }{ + { + name: "new file mode with hunk", + patch: []byte(`diff --git a/new.go b/new.go +new file mode 100644 +--- /dev/null ++++ b/new.go +@@ -0,0 +1,2 @@ ++package test ++ +`), + wantErr: "file mode changes are not supported", + }, + { + name: "deleted file mode with hunk", + patch: []byte(`diff --git a/old.go b/old.go +deleted file mode 100644 +--- a/old.go ++++ /dev/null +@@ -1,1 +0,0 @@ +-package test +`), + wantErr: "patches may only modify existing files", + }, + { + name: "binary files differ", + patch: []byte(`diff --git a/file.bin b/file.bin +Binary files a/file.bin and b/file.bin differ +`), + wantErr: "patch contains no hunks", + }, + { + name: "create and rename", + patch: []byte(`diff --git a/1 b/2 +new file mode 100644 +rename from 1 +rename to 2 +`), + wantErr: "file mode changes are not supported", + }, + } + + for _, test := range tests { + test 
:= test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + _, err := ApplyFile([]byte("package test\n"), test.patch) + require.Error(t, err) + assert.Contains(t, err.Error(), test.wantErr) + }) + } +} + +func TestApplyFile_ShrinkFailures(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + original []byte + target []byte + current []byte + }{ + { + name: "preimage larger than source", + original: []byte("1\n2\n3\n4\n5\n6\n7\n8\n999999\nA\nB\nC\nD\nE\nF\nG\nH\nI\nJ\n\n"), + target: []byte("11\n2\n3\n4\n5\n6\n7\n8\n9\nA\nB\nC\nD\nE\nF\nG\nHH\nI\nJ\n\n"), + current: []byte("2\n3\n4\n5\n6\n7\n8\n999999\nA\nB\nC\nD\nE\nF\nG\nH\nI\nJ\n"), + }, + { + name: "near eof overrun", + original: []byte("a\nb\nc\nd\ne\n"), + target: []byte("a\nb\nc\nd\nz\n"), + current: []byte("a\nb\nc\nd\n"), + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + patch := buildPatch(t, "F", test.original, test.target) + applied, err := ApplyFileWithConflicts(test.current, patch) + require.Error(t, err) + require.ErrorIs(t, err, ErrPatchConflict) + assert.Contains(t, string(applied), defaultCurrentConflictMarker) + }) + } +} + +func TestApplyFile_CRLFPreservation(t *testing.T) { + t.Parallel() + + pristine := []byte("alpha\r\nbeta\r\n") + target := []byte("alpha\r\nbravo\r\n") + patch := buildPatch(t, "crlf.txt", pristine, target) + + applied, err := ApplyFile(pristine, patch) + require.NoError(t, err) + assert.Equal(t, target, applied) +} + +func loadApplyFixture(t *testing.T, name string) applyFixtureFiles { + t.Helper() + + load := func(ext string) []byte { + t.Helper() + + path := filepath.Join("testdata", "apply", name+"."+ext) + data, err := os.ReadFile(path) + if errors.Is(err, os.ErrNotExist) { + return nil + } + require.NoError(t, err) + return data + } + + return applyFixtureFiles{ + src: load("src"), + patch: load("patch"), + out: load("out"), + } +} + +func mustReadFile(t *testing.T, path string) 
[]byte { + t.Helper() + + data, err := os.ReadFile(path) + require.NoError(t, err) + return data +} + +func expectedApplyFixtureOutput(t *testing.T, files applyFixtureFiles) []byte { + t.Helper() + + if files.out == nil { + return nil + } + + parsed, errs := parse(string(files.patch)) + require.Empty(t, errs) + require.Len(t, parsed.FileDiff, 1) + require.Len(t, parsed.FileDiff[0].Hunks, 1) + + hunk := parsed.FileDiff[0].Hunks[0] + start := hunk.StartLineNumberOld - 1 + if start < 0 { + start = 0 + } + + sourceLines := splitBytesLines(files.src) + end := start + hunk.CountOld + if end > len(sourceLines) { + end = len(sourceLines) + } + + expected := append([]byte{}, files.out...) + for _, line := range sourceLines[end:] { + expected = append(expected, line...) + } + return expected +} + +func splitBytesLines(content []byte) [][]byte { + if len(content) == 0 { + return nil + } + + lines := bytes.SplitAfter(content, []byte("\n")) + if len(lines) > 0 && len(lines[len(lines)-1]) == 0 { + lines = lines[:len(lines)-1] + } + return lines +} + +func contextLabel(context int) string { + if context == 0 { + return "0" + } + return "3" +} + +func rewriteFirstHunkHeader(patch []byte, header string) []byte { + lines := bytes.Split(patch, []byte("\n")) + for i, line := range lines { + if bytes.HasPrefix(line, []byte("@@ ")) { + lines[i] = []byte(header) + return bytes.Join(lines, []byte("\n")) + } + } + return patch +} + +func buildPatch(t *testing.T, path string, pristine, materialized []byte) []byte { + t.Helper() + return buildPatchWithContext(t, path, pristine, materialized, 3) +} + +func buildPatchWithContext(t *testing.T, path string, pristine, materialized []byte, context int) []byte { + t.Helper() + + diff, err := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ + A: difflib.SplitLines(string(pristine)), + B: difflib.SplitLines(string(materialized)), + FromFile: "a/" + path, + ToFile: "b/" + path, + Context: context, + }) + require.NoError(t, err) + require.NotEmpty(t, 
diff) + + return append([]byte("diff --git a/"+path+" b/"+path+"\n"), []byte(diff)...) +} + +func TestApplyFile_PreservesExactBytes(t *testing.T) { + t.Parallel() + + files := loadApplyFixture(t, "text_fragment_change_single_noeol") + applied, err := ApplyFile(files.src, files.patch) + require.NoError(t, err) + assert.True(t, bytes.Equal(files.out, applied)) +} diff --git a/model.go b/model.go new file mode 100644 index 0000000..6da10a6 --- /dev/null +++ b/model.go @@ -0,0 +1,168 @@ +package git_diff_parser + +import ( + "fmt" + "strings" +) + +type contentChangeType string + +const ( + contentChangeTypeAdd contentChangeType = "add" + contentChangeTypeDelete contentChangeType = "delete" + contentChangeTypeModify contentChangeType = "modify" + contentChangeTypeNOOP contentChangeType = "" +) + +// contentChange is a part of the line that starts with ` `, `-`, `+`. +// Consecutive contentChange build a line. +// A `~` is a special case of contentChange that is used to indicate a new line. +type contentChange struct { + Type contentChangeType `json:"type"` + From string `json:"from"` + To string `json:"to"` +} + +type changeList []contentChange + +// hunkLine keeps a normalized, apply-friendly view of a hunk line. +type hunkLine struct { + Kind byte `json:"kind"` + Text string `json:"text"` + HasNewline bool `json:"has_newline"` + OldEOF bool `json:"old_eof,omitempty"` + NewEOF bool `json:"new_eof,omitempty"` +} + +// hunk is a line that starts with @@. +// Each hunk shows one area where the files differ. +// Unified format hunks look like this: +// @@ from-file-line-numbers to-file-line-numbers @@ +// +// line-from-either-file +// line-from-either-file… +// +// If a hunk contains just one line, only its start line number appears. Otherwise its line numbers look like 'start,count'. An empty hunk is considered to start at the line that follows the hunk. 
+type hunk struct { + ChangeList changeList `json:"change_list"` + Lines []hunkLine `json:"lines,omitempty"` + StartLineNumberOld int `json:"start_line_number_old"` + CountOld int `json:"count_old"` + StartLineNumberNew int `json:"start_line_number_new"` + CountNew int `json:"count_new"` +} + +func (l *hunkLine) markNoNewline() { + l.HasNewline = false +} + +func (h *hunk) markEOFMarkers() { + oldSeen := 0 + newSeen := 0 + + for i := range h.Lines { + line := &h.Lines[i] + if line.Kind == ' ' || line.Kind == '-' { + oldSeen++ + } + if line.Kind == ' ' || line.Kind == '+' { + newSeen++ + } + if !line.HasNewline || strings.TrimSuffix(line.Text, "\r") != "" { + continue + } + + line.OldEOF = (line.Kind == ' ' || line.Kind == '-') && oldSeen == h.CountOld + line.NewEOF = (line.Kind == ' ' || line.Kind == '+') && newSeen == h.CountNew + } +} + +func (changes *changeList) isSignificant() bool { + for _, change := range *changes { + if change.Type != contentChangeTypeNOOP { + return true + } + } + return false +} + +func (h *hunk) GoString() string { + return fmt.Sprintf( + "git_diff_parser.Hunk{ChangeList:%#v, StartLineNumberOld:%d, CountOld:%d, StartLineNumberNew:%d, CountNew:%d}", + h.ChangeList, + h.StartLineNumberOld, + h.CountOld, + h.StartLineNumberNew, + h.CountNew, + ) +} + +type fileDiffType string + +const ( + fileDiffTypeAdded fileDiffType = "add" + fileDiffTypeDeleted fileDiffType = "delete" + fileDiffTypeModified fileDiffType = "modify" +) + +type binaryDeltaType string + +const ( + binaryDeltaTypeLiteral binaryDeltaType = "literal" + binaryDeltaTypeDelta binaryDeltaType = "delta" +) + +type binaryPatch struct { + Type binaryDeltaType `json:"type"` + Count int + Content string +} + +// fileDiff Source of truth: https://github.com/git/git/blob/master/diffcore.h#L106 +// Implemented in https://github.com/git/git/blob/master/diff.c#L3496 +type fileDiff struct { + FromFile string `json:"from_file"` + ToFile string `json:"to_file"` + Type fileDiffType 
`json:"type"` + IsBinary bool `json:"is_binary"` + OldMode string `json:"old_mode,omitempty"` + NewMode string `json:"new_mode,omitempty"` + IndexOld string `json:"index_old,omitempty"` + IndexNew string `json:"index_new,omitempty"` + IndexMode string `json:"index_mode,omitempty"` + SimilarityIndex int `json:"similarity_index,omitempty"` + DissimilarityIndex int `json:"dissimilarity_index,omitempty"` + RenameFrom string `json:"rename_from,omitempty"` + RenameTo string `json:"rename_to,omitempty"` + CopyFrom string `json:"copy_from,omitempty"` + CopyTo string `json:"copy_to,omitempty"` + Hunks []hunk `json:"hunks"` + BinaryPatch []binaryPatch `json:"binary_patch"` +} + +func (fd *fileDiff) GoString() string { + var hunksStr string + if fd.Hunks == nil { + hunksStr = "[]git_diff_parser.Hunk(nil)" + } else { + hunks := make([]string, len(fd.Hunks)) + for i := range fd.Hunks { + hunks[i] = fd.Hunks[i].GoString() + } + hunksStr = "[]git_diff_parser.Hunk{" + strings.Join(hunks, ", ") + "}" + } + return fmt.Sprintf( + "&git_diff_parser.FileDiff{FromFile:%#v, ToFile:%#v, Type:%#v, IsBinary:%t, NewMode:%#v, Hunks:%s, BinaryPatch:%#v}", + fd.FromFile, + fd.ToFile, + fd.Type, + fd.IsBinary, + fd.NewMode, + hunksStr, + fd.BinaryPatch, + ) +} + +type diff struct { + FileDiff []fileDiff `json:"file_diff"` +} diff --git a/parity_test.go b/parity_test.go new file mode 100644 index 0000000..3c99c56 --- /dev/null +++ b/parity_test.go @@ -0,0 +1,414 @@ +//go:build parity + +package git_diff_parser + +import ( + "bytes" + "encoding/json" + "errors" + "io/fs" + "os" + "os/exec" + "path/filepath" + "sort" + "strconv" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type parityFixture struct { + GitArgs []string `json:"gitArgs"` + ExpectConflict bool `json:"expectConflict"` + CheckReject bool `json:"checkReject"` + IgnoreWhitespace bool `json:"ignoreWhitespace"` + SkipLibrary bool `json:"skipLibrary"` + ExpectGitError bool 
`json:"expectGitError"` + SrcFiles map[string]string `json:"srcFiles"` + OutFiles map[string]string `json:"outFiles"` + SrcModes map[string]string `json:"srcModes"` + OutModes map[string]string `json:"outModes"` +} + +type parityCase struct { + name string + src []byte + patch []byte + out []byte + rej []byte + srcTree parityTree + outTree parityTree + fixture parityFixture +} + +type parityFile struct { + content []byte + mode fs.FileMode +} + +type parityTree map[string]parityFile + +func TestApplyFile_ParityCorpus(t *testing.T) { + if testing.Short() { + t.Skip("parity corpus is an integration test stream") + } + + requireGitBinary(t) + + cases := loadParityCases(t) + require.NotEmpty(t, cases) + + for _, tc := range cases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + oracles := runGitApplyOracles(t, tc) + if tc.fixture.SkipLibrary { + assertParityTree(t, tc.outTree, oracles.tree) + if tc.fixture.ExpectGitError { + require.Error(t, oracles.exitErr) + } else { + require.NoError(t, oracles.exitErr) + } + return + } + + mergeResult, mergeErr := runLibraryApply(t, tc, false) + + if tc.fixture.ExpectConflict { + require.Error(t, mergeErr) + var applyErr *applyError + require.ErrorAs(t, mergeErr, &applyErr) + assert.True(t, errors.Is(mergeErr, ErrPatchConflict)) + assert.Equal(t, tc.src, oracles.applied) + assert.Contains(t, string(mergeResult.Content), "<<<<<<< Current") + assert.Contains(t, string(mergeResult.Content), ">>>>>>> Incoming patch") + if len(tc.out) > 0 { + for _, line := range bytes.Split(bytes.TrimSpace(tc.out), []byte("\n")) { + if len(line) == 0 { + continue + } + assert.Contains(t, string(mergeResult.Content), string(line)) + } + } + assertParityTree(t, tc.srcTree, oracles.tree) + } else { + require.NoError(t, mergeErr) + require.Equal(t, oracles.applied, mergeResult.Content) + if len(tc.out) > 0 { + assert.Equal(t, tc.out, mergeResult.Content) + } + assertParityTree(t, tc.outTree, oracles.tree) + } + + if 
tc.fixture.CheckReject { + rejectOracles := runGitApplyOracles(t, tc, "--reject") + require.True(t, rejectOracles.rejected) + rejectResult, rejectErr := runLibraryApply(t, tc, true) + require.Error(t, rejectErr) + var applyErr *applyError + require.ErrorAs(t, rejectErr, &applyErr) + require.NotEqual(t, tc.src, rejectOracles.applied) + assert.Equal(t, tc.src, rejectResult.Content) + if len(tc.rej) > 0 { + assert.Equal(t, tc.rej, trimGitRejectHeader(rejectResult.Reject)) + } else { + assert.Equal(t, rejectOracles.rej, rejectResult.Reject) + } + if len(tc.out) > 0 { + assert.Equal(t, tc.out, rejectOracles.applied) + } + require.NotEmpty(t, rejectOracles.rej) + assert.Contains(t, string(rejectOracles.rej), "rejected hunks") + } + }) + } +} + +func runLibraryApply(t *testing.T, tc parityCase, rejectMode bool) (applyResult, error) { + t.Helper() + + options := defaultMergeApplyOptions() + options.IgnoreWhitespace = tc.fixture.IgnoreWhitespace + if rejectMode { + options = defaultApplyOptions() + options.IgnoreWhitespace = tc.fixture.IgnoreWhitespace + } + if minContext, ok := fixtureContextArg(tc.fixture); ok { + options.MinContext = minContext + options.MinContextSet = true + } + + return applyFileWithOptions(tc.src, tc.patch, options) +} + +func trimGitRejectHeader(rej []byte) []byte { + if idx := bytes.IndexByte(rej, '\n'); idx >= 0 { + return rej[idx+1:] + } + return rej +} + +func fixtureContextArg(fixture parityFixture) (int, bool) { + for _, candidate := range fixture.GitArgs { + if !strings.HasPrefix(candidate, "-C") || len(candidate) <= 2 { + continue + } + value, err := strconv.Atoi(strings.TrimPrefix(candidate, "-C")) + if err == nil { + return value, true + } + } + return 0, false +} + +type gitApplyOracle struct { + applied []byte + tree parityTree + rej []byte + rejected bool + exitErr error +} + +func runGitApplyOracles(t *testing.T, tc parityCase, extraArgs ...string) gitApplyOracle { + t.Helper() + + dir := t.TempDir() + writeParityTree(t, dir, 
tc.srcTree) + require.NoError(t, os.WriteFile(filepath.Join(dir, "patch.diff"), tc.patch, 0o600)) + + args := []string{"apply", "--whitespace=nowarn"} + args = append(args, tc.fixture.GitArgs...) + args = append(args, extraArgs...) + args = append(args, "patch.diff") + + cmd := exec.Command("git", args...) + cmd.Dir = dir + output, err := cmd.CombinedOutput() + oracles := gitApplyOracle{exitErr: err} + + if err != nil { + var exitErr *exec.ExitError + if errors.As(err, &exitErr) { + oracles.rejected = exitErr.ExitCode() != 0 + } else { + require.NoError(t, err, "git apply failed to start: %s", output) + } + } else { + oracles.rejected = false + } + + applied, readErr := os.ReadFile(filepath.Join(dir, "file.txt")) + if readErr == nil { + oracles.applied = applied + } + + rej, rejErr := os.ReadFile(filepath.Join(dir, "file.txt.rej")) + if rejErr == nil { + oracles.rej = rej + } + + oracles.tree = collectParityTree(t, dir) + + if len(output) > 0 && err == nil { + // git apply may emit successful warnings like context reduction; tree state is the oracle here. 
+ } + + return oracles +} + +func loadParityCases(t *testing.T) []parityCase { + t.Helper() + + root := filepath.Join("testdata", "parity") + entries, err := os.ReadDir(root) + require.NoError(t, err) + + cases := make([]parityCase, 0, len(entries)) + for _, entry := range entries { + if !entry.IsDir() { + continue + } + + dir := filepath.Join(root, entry.Name()) + fixture := readParityFixture(t, filepath.Join(dir, "fixture.json")) + srcTree, src, outTree, out := readParityTrees(t, dir, fixture) + cases = append(cases, parityCase{ + name: entry.Name(), + src: src, + patch: readParityFile(t, filepath.Join(dir, "patch")), + out: out, + rej: readParityFileMaybe(t, filepath.Join(dir, "rej")), + srcTree: srcTree, + outTree: outTree, + fixture: fixture, + }) + } + + sort.Slice(cases, func(i, j int) bool { + return cases[i].name < cases[j].name + }) + + return cases +} + +func readParityFixture(t *testing.T, path string) parityFixture { + t.Helper() + + raw := readParityFile(t, path) + var fixture parityFixture + require.NoError(t, json.Unmarshal(raw, &fixture)) + return fixture +} + +func readParityFile(t *testing.T, path string) []byte { + t.Helper() + + data, err := os.ReadFile(path) + require.NoError(t, err) + return data +} + +func readParityFileMaybe(t *testing.T, path string) []byte { + t.Helper() + + data, err := os.ReadFile(path) + if errors.Is(err, os.ErrNotExist) { + return nil + } + require.NoError(t, err) + return data +} + +func readParityTrees(t *testing.T, dir string, fixture parityFixture) (parityTree, []byte, parityTree, []byte) { + t.Helper() + + srcTree := loadParityTree(t, filepath.Join(dir, "src"), fixture.SrcFiles, fixture.SrcModes) + outTree := loadParityTree(t, filepath.Join(dir, "out"), fixture.OutFiles, fixture.OutModes) + return srcTree, treeBytes(srcTree), outTree, treeBytes(outTree) +} + +func loadParityTree(t *testing.T, legacyPath string, files map[string]string, modes map[string]string) parityTree { + t.Helper() + + if len(files) > 0 { + 
tree := make(parityTree, len(files)) + for path, content := range files { + tree[path] = parityFile{ + content: []byte(content), + mode: parseParityMode(modes[path]), + } + } + return tree + } + + legacy := readParityFileMaybe(t, legacyPath) + if legacy == nil { + return nil + } + return parityTree{ + "file.txt": {content: legacy}, + } +} + +func parseParityMode(raw string) fs.FileMode { + if raw == "" { + return 0 + } + if len(raw) >= 3 { + raw = raw[len(raw)-3:] + } + switch raw { + case "644": + return 0o644 + case "755": + return 0o755 + default: + return 0 + } +} + +func treeBytes(tree parityTree) []byte { + if len(tree) != 1 { + return nil + } + file, ok := tree["file.txt"] + if !ok { + return nil + } + return file.content +} + +func writeParityTree(t *testing.T, root string, tree parityTree) { + t.Helper() + + for path, file := range tree { + fullPath := filepath.Join(root, filepath.FromSlash(path)) + require.NoError(t, os.MkdirAll(filepath.Dir(fullPath), 0o755)) + require.NoError(t, os.WriteFile(fullPath, file.content, 0o600)) + if file.mode != 0 { + require.NoError(t, os.Chmod(fullPath, file.mode)) + } + } +} + +func collectParityTree(t *testing.T, root string) parityTree { + t.Helper() + + tree := make(parityTree) + require.NoError(t, filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error { + require.NoError(t, err) + if path == root || d.IsDir() { + return nil + } + base := filepath.Base(path) + if base == "patch.diff" || strings.HasSuffix(base, ".rej") { + return nil + } + rel, err := filepath.Rel(root, path) + require.NoError(t, err) + content, err := os.ReadFile(path) + require.NoError(t, err) + info, err := d.Info() + require.NoError(t, err) + tree[filepath.ToSlash(rel)] = parityFile{ + content: content, + mode: info.Mode().Perm(), + } + return nil + })) + return tree +} + +func assertParityTree(t *testing.T, want, got parityTree) { + t.Helper() + + if len(want) == 0 { + assert.Len(t, got, 0) + return + } + + require.Len(t, got, 
len(want)) + for path, expected := range want { + actual, ok := got[path] + require.True(t, ok, "missing file %s", path) + assert.Equal(t, expected.content, actual.content, "content mismatch for %s", path) + if expected.mode != 0 { + assert.Equal(t, expected.mode, actual.mode, "mode mismatch for %s", path) + } + } + for path := range got { + _, ok := want[path] + assert.True(t, ok, "unexpected file %s", path) + } +} + +func requireGitBinary(t *testing.T) { + t.Helper() + + _, err := exec.LookPath("git") + require.NoError(t, err) +} diff --git a/parser.go b/parser.go index 70cdcdd..cca261a 100644 --- a/parser.go +++ b/parser.go @@ -8,57 +8,14 @@ import ( "strings" ) -var ErrUnhandled = errors.New("unhandled git diff syntax") +var errUnhandled = errors.New("unhandled git diff syntax") -type ContentChangeType string - -const ( - ContentChangeTypeAdd ContentChangeType = "add" - ContentChangeTypeDelete ContentChangeType = "delete" - ContentChangeTypeModify ContentChangeType = "modify" - ContentChangeTypeNOOP ContentChangeType = "" -) - -// ContentChange is a part of the line that starts with ` `, `-`, `+` -// Consecutive ContentChange build a line. -// A `~` is a special case of ContentChange that is used to indicate a new line. -type ContentChange struct { - Type ContentChangeType `json:"type"` - From string `json:"from"` - To string `json:"to"` -} - -type ChangeList []ContentChange - -// Hunk is a line that starts with @@. -// Each hunk shows one area where the files differ -// Unified format hunks look like this: -// @@ from-file-line-numbers to-file-line-numbers @@ -// -// line-from-either-file -// line-from-either-file… -// -// If a hunk contains just one line, only its start line number appears. Otherwise its line numbers look like ā€˜start,count’. An empty hunk is considered to start at the line that follows the hunk. 
-type Hunk struct { - ChangeList ChangeList `json:"change_list"` - StartLineNumberOld int `json:"start_line_number_old"` - CountOld int `json:"count_old"` - StartLineNumberNew int `json:"start_line_number_new"` - CountNew int `json:"count_new"` -} - -func (changes *ChangeList) IsSignificant() bool { - for _, change := range *changes { - if change.Type != ContentChangeTypeNOOP { - return true - } - } - return false -} - -func NewHunk(line string) (Hunk, error) { +func newHunk(line string) (hunk, error) { namedHunkRegex := regexp.MustCompile(`(?m)^@@ -(?P<start_old>\d+),?(?P<count_old>\d+)? \+(?P<start_new>\d+),?(?P<count_new>\d+)? @@`) match := namedHunkRegex.FindStringSubmatch(line) + if len(match) == 0 { + return hunk{}, fmt.Errorf("invalid hunk header: %q", line) + } result := make(map[string]string) for i, name := range namedHunkRegex.SubexpNames() { if i != 0 && name != "" { @@ -67,7 +24,7 @@ func NewHunk(line string) (Hunk, error) { } startLineNumberOld, err := strconv.Atoi(result["start_old"]) if err != nil { - return Hunk{}, fmt.Errorf("failed to parse start line number old: %w", err) + return hunk{}, fmt.Errorf("failed to parse start line number old: %w", err) } countOld, err := strconv.Atoi(result["count_old"]) if err != nil { @@ -75,13 +32,13 @@ func NewHunk(line string) (Hunk, error) { } startLineNumberNew, err := strconv.Atoi(result["start_new"]) if err != nil { - return Hunk{}, fmt.Errorf("failed to parse start line number new: %w", err) + return hunk{}, fmt.Errorf("failed to parse start line number new: %w", err) } countNew, err := strconv.Atoi(result["count_new"]) if err != nil { countNew = 1 } - return Hunk{ + return hunk{ StartLineNumberOld: startLineNumberOld, CountOld: countOld, StartLineNumberNew: startLineNumberNew, @@ -89,155 +46,209 @@ func NewHunk(line string) (Hunk, error) { }, nil } -type FileDiffType string - -const ( - FileDiffTypeAdded FileDiffType = "add" - FileDiffTypeDeleted FileDiffType = "delete" - FileDiffTypeModified FileDiffType = "modify" -) - -type BinaryDeltaType 
string +type parserMode int const ( - BinaryDeltaTypeLiteral BinaryDeltaType = "literal" - BinaryDeltaTypeDelta BinaryDeltaType = "delta" -) - -type BinaryPatch struct { - Type BinaryDeltaType `json:"type"` - Count int - Content string -} - -// FileDiff Source of truth: https://github.com/git/git/blob/master/diffcore.h#L106 -// Implemented in https://github.com/git/git/blob/master/diff.c#L3496 -type FileDiff struct { - FromFile string `json:"from_file"` - ToFile string `json:"to_file"` - Type FileDiffType `json:"type"` - IsBinary bool `json:"is_binary"` - NewMode string `json:"new_mode"` - Hunks []Hunk `json:"hunks"` - BinaryPatch []BinaryPatch `json:"binary_patch"` -} - -type Diff struct { - FileDiff []FileDiff `json:"file_diff"` -} - -type ParserMode int - -const ( - modeHeader ParserMode = iota + modeHeader parserMode = iota modeHunk modeBinary ) type parser struct { - diff Diff + diff diff err []error - mode ParserMode + mode parserMode } func (p *parser) VisitLine(diff string) { - if p.tryVisitHeader(diff) { + line := trimSingleLineEnding(diff) + hasNewline := strings.HasSuffix(diff, "\n") + + if p.tryVisitHeader(line) { return } - if p.tryVisitBinary(diff) { + if p.tryVisitBinary(line) { return } - if p.tryVisitHunkHeader(diff) { + if p.tryVisitHunkHeader(line) { return } + fileHEAD := len(p.diff.FileDiff) - 1 + if fileHEAD < 0 { + p.err = append(p.err, fmt.Errorf("%w: %s", errUnhandled, line)) + return + } + hunkHEAD := len(p.diff.FileDiff[fileHEAD].Hunks) - 1 if hunkHEAD < 0 { - p.err = append(p.err, fmt.Errorf("%w: %s", ErrUnhandled, diff)) + p.err = append(p.err, fmt.Errorf("%w: %s", errUnhandled, diff)) return } - changeHead := len(p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList) - 1 + + hunk := &p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD] + // swallow extra, unused lines from start - if strings.HasPrefix(diff, "~") && - !p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList.IsSignificant() { - 
p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].StartLineNumberOld += 1 - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].StartLineNumberNew += 1 - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].CountOld -= 1 - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].CountNew -= 1 - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList = []ContentChange{} - } - if strings.HasPrefix(diff, "+") { - if changeHead > 0 && p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList[changeHead].Type == ContentChangeTypeDelete { - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList[changeHead].Type = ContentChangeTypeModify - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList[changeHead].To = strings.TrimPrefix(diff, "+") + if strings.HasPrefix(line, "~") && !hunk.ChangeList.isSignificant() { + hunk.StartLineNumberOld++ + hunk.StartLineNumberNew++ + hunk.CountOld-- + hunk.CountNew-- + hunk.ChangeList = []contentChange{} + } + + if strings.HasPrefix(line, "+") { + if len(hunk.ChangeList) > 0 && hunk.ChangeList[len(hunk.ChangeList)-1].Type == contentChangeTypeDelete { + hunk.ChangeList[len(hunk.ChangeList)-1].Type = contentChangeTypeModify + hunk.ChangeList[len(hunk.ChangeList)-1].To = trimSingleLineEnding(strings.TrimPrefix(line, "+")) + hunk.Lines = append(hunk.Lines, hunkLine{ + Kind: '+', + Text: trimSingleLineEnding(strings.TrimPrefix(line, "+")), + HasNewline: hasNewline, + }) return } - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList = append(p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList, ContentChange{ - Type: ContentChangeTypeAdd, + hunk.ChangeList = append(hunk.ChangeList, contentChange{ + Type: contentChangeTypeAdd, From: "", - To: strings.TrimPrefix(diff, "+"), + To: trimSingleLineEnding(strings.TrimPrefix(line, "+")), + }) + hunk.Lines = append(hunk.Lines, hunkLine{ + Kind: '+', + Text: trimSingleLineEnding(strings.TrimPrefix(line, "+")), + HasNewline: hasNewline, }) return } - if strings.HasPrefix(diff, "-") { - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList = 
append(p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList, ContentChange{ - Type: ContentChangeTypeDelete, - From: strings.TrimPrefix(diff, "-"), + + if strings.HasPrefix(line, "-") { + hunk.ChangeList = append(hunk.ChangeList, contentChange{ + Type: contentChangeTypeDelete, + From: trimSingleLineEnding(strings.TrimPrefix(line, "-")), To: "", }) + hunk.Lines = append(hunk.Lines, hunkLine{ + Kind: '-', + Text: trimSingleLineEnding(strings.TrimPrefix(line, "-")), + HasNewline: hasNewline, + }) + return + } + + if strings.HasPrefix(line, " ") { + hunk.ChangeList = append(hunk.ChangeList, contentChange{ + Type: contentChangeTypeNOOP, + From: line, + To: line, + }) + hunk.Lines = append(hunk.Lines, hunkLine{ + Kind: ' ', + Text: trimSingleLineEnding(strings.TrimPrefix(line, " ")), + HasNewline: hasNewline, + }) return } - if diff == "~" { - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList = append(p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList, ContentChange{ - Type: ContentChangeTypeNOOP, + + if line == "~" { + hunk.ChangeList = append(hunk.ChangeList, contentChange{ + Type: contentChangeTypeNOOP, From: "\n", To: "\n", }) + return } - p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList = append(p.diff.FileDiff[fileHEAD].Hunks[hunkHEAD].ChangeList, ContentChange{ - Type: ContentChangeTypeNOOP, - From: diff, - To: diff, - }) + + if strings.HasPrefix(line, `\ No newline at end of file`) { + if n := len(hunk.Lines); n > 0 { + hunk.Lines[n-1].markNoNewline() + } else { + p.err = append(p.err, fmt.Errorf("unexpected no-newline marker without a preceding patch line")) + return + } + hunk.ChangeList = append(hunk.ChangeList, contentChange{ + Type: contentChangeTypeNOOP, + From: line, + To: line, + }) + return + } + + if line == "" { + hunk.ChangeList = append(hunk.ChangeList, contentChange{ + Type: contentChangeTypeNOOP, + From: line, + To: line, + }) + return + } + + p.err = append(p.err, fmt.Errorf("unexpected hunk line %q", line)) } func (p *parser) 
tryVisitHeader(diff string) bool { // format: "diff --git a/README.md b/README.md" if strings.HasPrefix(diff, "diff ") { - strings.Split(diff, " ") + p.finalizeCurrentHunk() p.diff.FileDiff = append(p.diff.FileDiff, p.parseDiffLine(diff)) p.mode = modeHeader return true } + fileHEAD := len(p.diff.FileDiff) - 1 - if len(diff) == 0 && p.mode == modeHeader { + if diff == "" && p.mode == modeHeader { return true } if fileHEAD < 0 { - p.err = append(p.err, fmt.Errorf("%w: %s", ErrUnhandled, diff)) + p.err = append(p.err, fmt.Errorf("%w: %s", errUnhandled, diff)) return true } if p.mode != modeHeader { return false } + if strings.HasPrefix(diff, "+++ ") || strings.HasPrefix(diff, "--- ") { // ignore -- we're still in the FileDiff and we've already captured the file names return true } if strings.HasPrefix(diff, "index ") { + p.parseIndexHeader(diff, fileHEAD) return true } - if done := p.visitFileModeHeader(diff, fileHEAD); done { - return done + if strings.HasPrefix(diff, "similarity index ") { + p.diff.FileDiff[fileHEAD].SimilarityIndex = parsePercentValue(strings.TrimPrefix(diff, "similarity index ")) + return true } - - if strings.HasPrefix(diff, "rename from ") || strings.HasPrefix(diff, "rename to ") { - p.diff.FileDiff[fileHEAD].Type = FileDiffTypeModified + if strings.HasPrefix(diff, "dissimilarity index ") { + p.diff.FileDiff[fileHEAD].DissimilarityIndex = parsePercentValue(strings.TrimPrefix(diff, "dissimilarity index ")) + return true + } + if strings.HasPrefix(diff, "copy from ") { + p.diff.FileDiff[fileHEAD].CopyFrom = strings.TrimPrefix(diff, "copy from ") + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified + return true + } + if strings.HasPrefix(diff, "copy to ") { + p.diff.FileDiff[fileHEAD].CopyTo = strings.TrimPrefix(diff, "copy to ") + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified + return true + } + if strings.HasPrefix(diff, "rename from ") { + p.diff.FileDiff[fileHEAD].RenameFrom = strings.TrimPrefix(diff, "rename from ") + 
p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified + return true + } + if strings.HasPrefix(diff, "rename to ") { + p.diff.FileDiff[fileHEAD].RenameTo = strings.TrimPrefix(diff, "rename to ") + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified return true } + if done := p.visitFileModeHeader(diff, fileHEAD); done { + return done + } + if strings.HasPrefix(diff, "GIT binary patch") { - p.diff.FileDiff[fileHEAD].Type = FileDiffTypeModified + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified p.diff.FileDiff[fileHEAD].IsBinary = true p.mode = modeBinary return true @@ -251,33 +262,52 @@ func (p *parser) tryVisitHeader(diff string) bool { if strings.HasPrefix(diff, "similarity") { return true } + // continue to parse if fileHEAD > 0 return fileHEAD < 0 } func (p *parser) visitFileModeHeader(diff string, fileHEAD int) bool { if strings.HasPrefix(diff, "new file mode ") { - p.diff.FileDiff[fileHEAD].Type = FileDiffTypeModified + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified p.diff.FileDiff[fileHEAD].NewMode = strings.TrimPrefix(diff, "new file mode ") return true } if strings.HasPrefix(diff, "new mode ") { - p.diff.FileDiff[fileHEAD].Type = FileDiffTypeModified + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified p.diff.FileDiff[fileHEAD].NewMode = strings.TrimPrefix(diff, "new mode ") return true } if strings.HasPrefix(diff, "deleted file mode ") { - p.diff.FileDiff[fileHEAD].Type = FileDiffTypeDeleted + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeDeleted + p.diff.FileDiff[fileHEAD].OldMode = strings.TrimPrefix(diff, "deleted file mode ") return true } if strings.HasPrefix(diff, "old mode ") { - p.diff.FileDiff[fileHEAD].Type = FileDiffTypeModified + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified + p.diff.FileDiff[fileHEAD].OldMode = strings.TrimPrefix(diff, "old mode ") return true } return false } +func (p *parser) parseIndexHeader(diff string, fileHEAD int) { + fields := strings.Fields(strings.TrimPrefix(diff, "index ")) + if len(fields) 
== 0 { + return + } + + parts := strings.SplitN(fields[0], "..", 2) + if len(parts) == 2 { + p.diff.FileDiff[fileHEAD].IndexOld = parts[0] + p.diff.FileDiff[fileHEAD].IndexNew = parts[1] + } + if len(fields) > 1 { + p.diff.FileDiff[fileHEAD].IndexMode = fields[1] + } +} + func (p *parser) tryVisitBinary(diff string) bool { if p.mode != modeBinary { return false @@ -287,27 +317,27 @@ func (p *parser) tryVisitBinary(diff string) bool { return true } if strings.HasPrefix(diff, "delta ") { - p.diff.FileDiff[fileHEAD].Type = FileDiffTypeModified + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified startByteCount, err := strconv.Atoi(strings.Split(diff, " ")[1]) if err != nil { return true } - p.diff.FileDiff[fileHEAD].BinaryPatch = append(p.diff.FileDiff[fileHEAD].BinaryPatch, BinaryPatch{ - Type: BinaryDeltaTypeDelta, + p.diff.FileDiff[fileHEAD].BinaryPatch = append(p.diff.FileDiff[fileHEAD].BinaryPatch, binaryPatch{ + Type: binaryDeltaTypeDelta, Count: startByteCount, Content: "", }) return true } if strings.HasPrefix(diff, "literal ") { - p.diff.FileDiff[fileHEAD].Type = FileDiffTypeModified + p.diff.FileDiff[fileHEAD].Type = fileDiffTypeModified startByteCount, err := strconv.Atoi(strings.Split(diff, " ")[1]) if err != nil { return true } - p.diff.FileDiff[fileHEAD].BinaryPatch = append(p.diff.FileDiff[fileHEAD].BinaryPatch, BinaryPatch{ - Type: BinaryDeltaTypeLiteral, + p.diff.FileDiff[fileHEAD].BinaryPatch = append(p.diff.FileDiff[fileHEAD].BinaryPatch, binaryPatch{ + Type: binaryDeltaTypeLiteral, Count: startByteCount, Content: "", }) @@ -327,7 +357,8 @@ func (p *parser) tryVisitHunkHeader(diff string) bool { return false } if strings.HasPrefix(diff, "@@") { - hunk, err := NewHunk(diff) + p.finalizeCurrentHunk() + hunk, err := newHunk(diff) if err != nil { p.err = append(p.err, err) } @@ -338,7 +369,20 @@ func (p *parser) tryVisitHunkHeader(diff string) bool { return false } -func (p *parser) parseDiffLine(line string) FileDiff { +func (p *parser) 
finalizeCurrentHunk() { + if len(p.diff.FileDiff) == 0 { + return + } + fileHEAD := len(p.diff.FileDiff) - 1 + hunks := p.diff.FileDiff[fileHEAD].Hunks + if len(hunks) == 0 { + return + } + p.diff.FileDiff[fileHEAD].Hunks[len(hunks)-1].markEOFMarkers() +} + +func (p *parser) parseDiffLine(line string) fileDiff { + line = trimSingleLineEnding(line) filesStr := line[11:] var oldPath, newPath string @@ -366,37 +410,50 @@ func (p *parser) parseDiffLine(line string) FileDiff { newPath = segs[1][3 : len(segs[1])-1] } - return FileDiff{ + return fileDiff{ FromFile: oldPath, ToFile: newPath, } } -// Converts git diff --word-diff=porcelain output to a Diff object. -func Parse(diff string) (Diff, []error) { +func parsePercentValue(raw string) int { + raw = strings.TrimSuffix(raw, "%") + value, err := strconv.Atoi(raw) + if err != nil { + return 0 + } + return value +} + +// Converts git diff --word-diff=porcelain output to a diff object. +func parse(diff string) (diff, []error) { p := parser{} - lines := strings.Split(diff, "\n") + lines := splitLinesPreserveNewline(diff) for i := 0; i < len(lines); i++ { p.VisitLine(lines[i]) } + if strings.HasSuffix(diff, "\n") { + p.VisitLine("") + } + p.finalizeCurrentHunk() return p.diff, p.err } // SignificantChange Allows a structured diff to be passed into the `isSignificant` function to determine significance. That function can return a message, which is optionally passed as the final argument // Returns the first significant change found, or false if non found. 
-func SignificantChange(diff string, isSignificant func(*FileDiff, *ContentChange) (bool, string)) (bool, string, error) { - parsed, err := Parse(diff) - if len(err) > 0 { - return true, "", fmt.Errorf("failed to parse diff: %w", err[0]) +func significantChange(diff string, isSignificant func(*fileDiff, *contentChange) (bool, string)) (isSignificantResult bool, resultMsg string, resultErr error) { + parsed, errs := parse(diff) + if len(errs) > 0 { + return true, "", fmt.Errorf("failed to parse diff: %w", errs[0]) } - for _, fileDiff := range parsed.FileDiff { - if significant, msg := isSignificant(&fileDiff, &ContentChange{}); significant { + for i := range parsed.FileDiff { + if sig, msg := isSignificant(&parsed.FileDiff[i], &contentChange{}); sig { return true, msg, nil } - for _, hunk := range fileDiff.Hunks { + for _, hunk := range parsed.FileDiff[i].Hunks { for _, change := range hunk.ChangeList { - if significant, msg := isSignificant(&fileDiff, &change); significant { + if sig, msg := isSignificant(&parsed.FileDiff[i], &change); sig { return true, msg, nil } } diff --git a/parser_test.go b/parser_test.go index 0bb359b..96126e7 100644 --- a/parser_test.go +++ b/parser_test.go @@ -1,4 +1,4 @@ -package git_diff_parser_test +package git_diff_parser import ( "embed" @@ -9,8 +9,6 @@ import ( "strings" "testing" - git_diff_parser "github.com/speakeasy-api/git-diff-parser" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -26,19 +24,19 @@ func TestParse(t *testing.T) { want bool } significantDiffs, err := testdata.ReadDir("testdata/significant") - assert.NoError(t, err) + require.NoError(t, err) insignificantDiffs, err := testdata.ReadDir("testdata/insignificant") - assert.NoError(t, err) + require.NoError(t, err) tests := []SignificanceTest{} for _, testFile := range significantDiffs { if !strings.HasSuffix(testFile.Name(), "diff") { continue } content, err := testdata.ReadFile("testdata/significant/" + testFile.Name()) - 
assert.NoError(t, err) + require.NoError(t, err) tests = append(tests, SignificanceTest{ name: testFile.Name(), - relativePath: filepath.Join("testdata/significant", testFile.Name()), + relativePath: filepath.Join("testdata", "significant", testFile.Name()), input: string(content), want: true, }) @@ -48,10 +46,10 @@ func TestParse(t *testing.T) { continue } content, err := testdata.ReadFile("testdata/insignificant/" + testFile.Name()) - assert.NoError(t, err) + require.NoError(t, err) tests = append(tests, SignificanceTest{ name: testFile.Name(), - relativePath: filepath.Join("testdata/insignificant", testFile.Name()), + relativePath: filepath.Join("testdata", "insignificant", testFile.Name()), input: string(content), want: false, }) @@ -61,7 +59,7 @@ func TestParse(t *testing.T) { test := test t.Run(test.name, func(t *testing.T) { t.Parallel() - got, msg, err := git_diff_parser.SignificantChange(test.input, func(diff *git_diff_parser.FileDiff, change *git_diff_parser.ContentChange) (bool, string) { + got, msg, err := significantChange(test.input, func(diff *fileDiff, change *contentChange) (bool, string) { if diff.ToFile == "gen.yaml" || diff.ToFile == "RELEASES.md" { return false, "" } @@ -75,23 +73,82 @@ func TestParse(t *testing.T) { return false, "" } - if diff.Type == git_diff_parser.FileDiffTypeModified { + if diff.Type == fileDiffTypeModified { return true, fmt.Sprintf("significant diff %#v", diff) } - if change.Type == git_diff_parser.ContentChangeTypeNOOP { + if change.Type == contentChangeTypeNOOP { return false, "" } return true, fmt.Sprintf("significant change %#v in %s", change, diff.ToFile) }) require.NoError(t, err) - MatchMessageSnapshot(t, test.relativePath+".msg", msg) + MatchMessageSnapshot(t, test.relativePath+".msg", normalizeSnapshotTypes(msg)) assert.Equal(t, test.want, got) }) } } -func MatchMessageSnapshot(t *testing.T, snapshotName string, content string) { +func TestParseCapturesFileMetadataAndHunkLines(t *testing.T) { + t.Parallel() + + 
diff := `diff --git a/src.txt b/dst.txt +similarity index 92% +rename from src.txt +rename to dst.txt +index 1234567..89abcde 100755 +old mode 100644 +new mode 100755 +--- a/src.txt ++++ b/dst.txt +@@ -1,2 +1,2 @@ +-old ++new + second +\ No newline at end of file +` + + parsed, errs := parse(diff) + require.Empty(t, errs) + require.Len(t, parsed.FileDiff, 1) + + fileDiff := parsed.FileDiff[0] + assert.Equal(t, "src.txt", fileDiff.FromFile) + assert.Equal(t, "dst.txt", fileDiff.ToFile) + assert.Equal(t, fileDiffTypeModified, fileDiff.Type) + assert.Equal(t, "1234567", fileDiff.IndexOld) + assert.Equal(t, "89abcde", fileDiff.IndexNew) + assert.Equal(t, "100755", fileDiff.IndexMode) + assert.Equal(t, "100644", fileDiff.OldMode) + assert.Equal(t, "100755", fileDiff.NewMode) + assert.Equal(t, 92, fileDiff.SimilarityIndex) + assert.Equal(t, "src.txt", fileDiff.RenameFrom) + assert.Equal(t, "dst.txt", fileDiff.RenameTo) + + require.Len(t, fileDiff.Hunks, 1) + hunk := fileDiff.Hunks[0] + assert.Equal(t, 1, hunk.StartLineNumberOld) + assert.Equal(t, 1, hunk.StartLineNumberNew) + assert.Equal(t, 2, hunk.CountOld) + assert.Equal(t, 2, hunk.CountNew) + require.Len(t, hunk.Lines, 3) + + assert.Equal(t, byte('-'), hunk.Lines[0].Kind) + assert.Equal(t, "old", hunk.Lines[0].Text) + assert.True(t, hunk.Lines[0].HasNewline) + + assert.Equal(t, byte('+'), hunk.Lines[1].Kind) + assert.Equal(t, "new", hunk.Lines[1].Text) + assert.True(t, hunk.Lines[1].HasNewline) + + assert.Equal(t, byte(' '), hunk.Lines[2].Kind) + assert.Equal(t, "second", hunk.Lines[2].Text) + assert.False(t, hunk.Lines[2].HasNewline) + assert.False(t, hunk.Lines[2].OldEOF) + assert.False(t, hunk.Lines[2].NewEOF) +} + +func MatchMessageSnapshot(t *testing.T, snapshotName, content string) { t.Helper() _, filename, _, ok := runtime.Caller(0) require.True(t, ok) @@ -100,7 +157,7 @@ func MatchMessageSnapshot(t *testing.T, snapshotName string, content string) { if _, err := os.Stat(snapshotFile); err != nil { f, err := 
os.OpenFile(snapshotFile, os.O_APPEND|os.O_CREATE|os.O_RDWR, os.ModePerm) require.NoError(t, err) - defer f.Close() + defer func() { _ = f.Close() }() _, err = f.WriteString(content) require.NoError(t, err) return @@ -109,3 +166,13 @@ func MatchMessageSnapshot(t *testing.T, snapshotName string, content string) { require.NoError(t, err) require.Equal(t, string(f), content) } + +func normalizeSnapshotTypes(content string) string { + replacer := strings.NewReplacer( + "git_diff_parser.contentChange", "git_diff_parser.ContentChange", + "git_diff_parser.changeList", "git_diff_parser.ChangeList", + "git_diff_parser.hunk", "git_diff_parser.Hunk", + "git_diff_parser.binaryPatch", "git_diff_parser.BinaryPatch", + ) + return replacer.Replace(content) +} diff --git a/patchset.go b/patchset.go new file mode 100644 index 0000000..0ba09a8 --- /dev/null +++ b/patchset.go @@ -0,0 +1,178 @@ +package git_diff_parser + +import ( + "bytes" + "errors" + "fmt" + "strings" +) + +var ( + errPatchCreate = errors.New("patch creates are not supported") + errPatchDelete = errors.New("patch deletes are not supported") + errPatchRename = errors.New("patch renames are not supported") + errPatchModeChange = errors.New("patch mode changes are not supported") + errPatchBinary = errors.New("binary patches are not supported") +) + +type patchsetOperation string + +const ( + patchsetOperationCreate patchsetOperation = "create" + patchsetOperationDelete patchsetOperation = "delete" + patchsetOperationRename patchsetOperation = "rename" + patchsetOperationCopy patchsetOperation = "copy" + patchsetOperationModeChange patchsetOperation = "mode change" + patchsetOperationBinary patchsetOperation = "binary" +) + +type unsupportedPatchError struct { + Operation patchsetOperation + Path string + From string + To string +} + +func (e *unsupportedPatchError) Error() string { + switch e.Operation { + case patchsetOperationCreate: + if e.Path != "" { + return fmt.Sprintf("patch creates are not supported for %q", 
e.Path) + } + return "patch creates are not supported" + case patchsetOperationDelete: + if e.Path != "" { + return fmt.Sprintf("patch deletes are not supported for %q", e.Path) + } + return "patch deletes are not supported" + case patchsetOperationRename: + if e.From != "" || e.To != "" { + return fmt.Sprintf("patch renames are not supported: %q -> %q", e.From, e.To) + } + return "patch renames are not supported" + case patchsetOperationModeChange: + if e.Path != "" { + return fmt.Sprintf("patch mode changes are not supported for %q", e.Path) + } + return "patch mode changes are not supported" + case patchsetOperationBinary: + if e.Path != "" { + return fmt.Sprintf("binary patches are not supported for %q", e.Path) + } + return "binary patches are not supported" + default: + return "unsupported patch" + } +} + +func (e *unsupportedPatchError) Is(target error) bool { + switch target { + case errPatchCreate: + return e.Operation == patchsetOperationCreate + case errPatchDelete: + return e.Operation == patchsetOperationDelete + case errPatchRename: + return e.Operation == patchsetOperationRename + case errPatchModeChange: + return e.Operation == patchsetOperationModeChange + case errPatchBinary: + return e.Operation == patchsetOperationBinary + default: + return false + } +} + +type patchset struct { + Files []patchsetFile +} + +type patchsetFile struct { + Diff fileDiff + Patch []byte +} + +func parsePatchset(patchData []byte) (patchset, []error) { + parsed, errs := parse(string(patchData)) + if len(errs) > 0 { + return patchset{}, errs + } + + chunks := splitPatchsetChunks(patchData) + if len(chunks) != len(parsed.FileDiff) { + return patchset{}, []error{ + fmt.Errorf("parsed %d file diffs but split %d patch fragments", len(parsed.FileDiff), len(chunks)), + } + } + + files := make([]patchsetFile, len(chunks)) + for i := range chunks { + files[i] = patchsetFile{ + Diff: parsed.FileDiff[i], + Patch: chunks[i], + } + } + + return patchset{Files: files}, nil +} + +func 
(p patchset) apply(tree map[string][]byte) (map[string][]byte, error) { + out := cloneTree(tree) + for i := range p.Files { + if err := applyPatchsetFile(out, &p.Files[i]); err != nil { + return nil, err + } + } + return out, nil +} + +func applyPatchset(tree map[string][]byte, patchData []byte) (map[string][]byte, error) { + patchset, errs := parsePatchset(patchData) + if len(errs) > 0 { + return nil, fmt.Errorf("unsupported patch syntax: %w", errs[0]) + } + return patchset.apply(tree) +} + +func ApplyPatchset(tree map[string][]byte, patchData []byte) (map[string][]byte, error) { + return applyPatchset(tree, patchData) +} + +func cloneTree(tree map[string][]byte) map[string][]byte { + out := make(map[string][]byte, len(tree)) + for path, content := range tree { + out[path] = append([]byte(nil), content...) + } + return out +} + +func splitPatchsetChunks(patchData []byte) [][]byte { + lines := splitLinesPreserveNewline(string(patchData)) + if len(lines) == 0 { + return nil + } + + chunks := make([][]byte, 0) + var buf bytes.Buffer + started := false + + flush := func() { + if !started || buf.Len() == 0 { + return + } + chunks = append(chunks, append([]byte(nil), buf.Bytes()...)) + buf.Reset() + } + + for _, line := range lines { + if strings.HasPrefix(strings.TrimRight(line, "\n"), "diff --git ") { + flush() + started = true + } + if started { + buf.WriteString(line) + } + } + + flush() + return chunks +} diff --git a/patchset_apply.go b/patchset_apply.go new file mode 100644 index 0000000..e4767b6 --- /dev/null +++ b/patchset_apply.go @@ -0,0 +1,151 @@ +package git_diff_parser + +import "fmt" + +const patchsetOperationModify patchsetOperation = "modify" + +func applyPatchsetFile(tree map[string][]byte, file *patchsetFile) error { + if file.Diff.IsBinary { + return &unsupportedPatchError{ + Operation: patchsetOperationBinary, + Path: firstNonEmpty(file.Diff.ToFile, file.Diff.FromFile), + } + } + + op, sourcePath, targetPath, err := determinePatchsetOperation(tree, 
&file.Diff) + if err != nil { + return err + } + + switch op { + case patchsetOperationCreate: + if _, exists := tree[targetPath]; exists { + return fmt.Errorf("cannot create existing file %q", targetPath) + } + content, err := applyPatchsetContent(nil, file) + if err != nil { + return err + } + tree[targetPath] = append([]byte(nil), content...) + return nil + case patchsetOperationDelete: + content, exists := tree[sourcePath] + if !exists { + return fmt.Errorf("cannot delete missing file %q", sourcePath) + } + if len(file.Diff.Hunks) > 0 { + if _, err := applyPatchsetContent(content, file); err != nil { + return err + } + } + delete(tree, sourcePath) + return nil + case patchsetOperationRename: + content, exists := tree[sourcePath] + if !exists { + return fmt.Errorf("cannot rename missing file %q", sourcePath) + } + if targetPath != sourcePath { + if _, exists := tree[targetPath]; exists { + return fmt.Errorf("cannot rename %q to existing file %q", sourcePath, targetPath) + } + } + applied, err := applyPatchsetContent(content, file) + if err != nil { + return err + } + delete(tree, sourcePath) + tree[targetPath] = append([]byte(nil), applied...) + return nil + case patchsetOperationCopy: + content, exists := tree[sourcePath] + if !exists { + return fmt.Errorf("cannot copy missing file %q", sourcePath) + } + if _, exists := tree[targetPath]; exists { + return fmt.Errorf("cannot copy to existing file %q", targetPath) + } + applied, err := applyPatchsetContent(content, file) + if err != nil { + return err + } + tree[targetPath] = append([]byte(nil), applied...) + return nil + case patchsetOperationModeChange, patchsetOperationModify: + content, exists := tree[targetPath] + if !exists { + return fmt.Errorf("cannot modify missing file %q", targetPath) + } + applied, err := applyPatchsetContent(content, file) + if err != nil { + return err + } + tree[targetPath] = append([]byte(nil), applied...) 
+ return nil + default: + return fmt.Errorf("unsupported patch operation") + } +} + +func determinePatchsetOperation(tree map[string][]byte, fileDiff *fileDiff) (op patchsetOperation, sourcePath, targetPath string, err error) { + sourcePath, targetPath = patchsetPaths(fileDiff) + + switch { + case fileDiff.RenameFrom != "" || fileDiff.RenameTo != "": + return patchsetOperationRename, sourcePath, targetPath, nil + case fileDiff.CopyFrom != "" || fileDiff.CopyTo != "": + return patchsetOperationCopy, sourcePath, targetPath, nil + case fileDiff.Type == fileDiffTypeAdded: + return patchsetOperationCreate, "", targetPath, nil + case fileDiff.Type == fileDiffTypeDeleted: + return patchsetOperationDelete, sourcePath, "", nil + } + + if fileDiff.NewMode != "" && fileDiff.OldMode == "" { + if _, exists := tree[targetPath]; exists { + return "", "", "", fmt.Errorf("cannot create existing file %q", targetPath) + } + return patchsetOperationCreate, "", targetPath, nil + } + if fileDiff.OldMode != "" || fileDiff.NewMode != "" { + return patchsetOperationModeChange, sourcePath, targetPath, nil + } + + return patchsetOperationModify, sourcePath, targetPath, nil +} + +func patchsetPaths(fileDiff *fileDiff) (sourcePath, targetPath string) { + sourcePath = firstNonEmpty(fileDiff.RenameFrom, fileDiff.CopyFrom, fileDiff.FromFile, fileDiff.ToFile) + targetPath = firstNonEmpty(fileDiff.RenameTo, fileDiff.CopyTo, fileDiff.ToFile, fileDiff.FromFile) + return sourcePath, targetPath +} + +func applyPatchsetContent(pristine []byte, file *patchsetFile) ([]byte, error) { + if len(file.Diff.Hunks) == 0 { + return append([]byte(nil), pristine...), nil + } + + hunks := make([]patchHunk, 0, len(file.Diff.Hunks)) + for i := range file.Diff.Hunks { + hunks = append(hunks, patchHunkFromHunk(&file.Diff.Hunks[i])) + } + + result, err := newPatchApply(applyOptions{Mode: applyModeApply}).applyValidatedPatch(pristine, validatedPatch{ + rejectHead: formatRejectHeader(&file.Diff), + hunks: hunks, + }) + if 
err != nil { + return nil, err + } + + return append([]byte(nil), result.Content...), nil +} + +func firstNonEmpty(values ...string) string { + for _, value := range values { + if value != "" { + return value + } + } + return "" +} diff --git a/patchset_test.go b/patchset_test.go new file mode 100644 index 0000000..382fa85 --- /dev/null +++ b/patchset_test.go @@ -0,0 +1,286 @@ +package git_diff_parser + +import ( + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestParsePatchset(t *testing.T) { + t.Parallel() + + patchA := buildPatch(t, "alpha.txt", []byte("alpha\none\n"), []byte("alpha\ntwo\n")) + patchB := buildPatch(t, "beta.txt", []byte("beta\none\n"), []byte("beta\ntwo\n")) + patchsetData := append(append([]byte{}, patchA...), patchB...) + + patchset, errs := parsePatchset(patchsetData) + require.Empty(t, errs) + require.Len(t, patchset.Files, 2) + + assert.Equal(t, "alpha.txt", patchset.Files[0].Diff.ToFile) + assert.Equal(t, "beta.txt", patchset.Files[1].Diff.ToFile) + assert.Contains(t, string(patchset.Files[0].Patch), "diff --git a/alpha.txt b/alpha.txt") + assert.Contains(t, string(patchset.Files[1].Patch), "diff --git a/beta.txt b/beta.txt") +} + +func TestPatchsetApply_MultipleFiles(t *testing.T) { + t.Parallel() + + original := map[string][]byte{ + "alpha.txt": []byte("alpha\none\n"), + "beta.txt": []byte("beta\none\n"), + "keep.txt": []byte("unchanged\n"), + } + + patchA := buildPatch(t, "alpha.txt", original["alpha.txt"], []byte("alpha\ntwo\n")) + patchB := buildPatch(t, "beta.txt", original["beta.txt"], []byte("beta\ntwo\n")) + patchsetData := append(append([]byte{}, patchA...), patchB...) 
+ + applied, err := applyPatchset(original, patchsetData) + require.NoError(t, err) + + assert.Equal(t, []byte("alpha\ntwo\n"), applied["alpha.txt"]) + assert.Equal(t, []byte("beta\ntwo\n"), applied["beta.txt"]) + assert.Equal(t, []byte("unchanged\n"), applied["keep.txt"]) + assert.Equal(t, []byte("alpha\none\n"), original["alpha.txt"]) + assert.Equal(t, []byte("beta\none\n"), original["beta.txt"]) +} + +func TestPatchsetApply_TextTreeOperations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + patch []byte + tree map[string][]byte + wantTree map[string][]byte + }{ + { + name: "create", + patch: mustReadFile(t, filepath.Join("testdata", "significant", "add.diff")), + tree: map[string][]byte{}, + wantTree: map[string][]byte{"a.txt": []byte("a\n")}, + }, + { + name: "delete", + patch: mustReadFile(t, filepath.Join("testdata", "significant", "rm.diff")), + tree: map[string][]byte{"a.txt": []byte("a\n")}, + wantTree: map[string][]byte{}, + }, + { + name: "rename", + patch: []byte(`diff --git a/src.txt b/dst.txt +similarity index 100% +rename from src.txt +rename to dst.txt +index 1234567..89abcde 100644 +--- a/src.txt ++++ b/dst.txt +@@ -1,2 +1,2 @@ +-alpha ++bravo + charlie +`), + tree: map[string][]byte{"src.txt": []byte("alpha\ncharlie\n")}, + wantTree: map[string][]byte{"dst.txt": []byte("bravo\ncharlie\n")}, + }, + { + name: "copy", + patch: []byte(`diff --git a/src.txt b/dst.txt +similarity index 100% +copy from src.txt +copy to dst.txt +index 1234567..89abcde 100644 +--- a/src.txt ++++ b/dst.txt +@@ -1,2 +1,3 @@ + alpha ++bravo + charlie +`), + tree: map[string][]byte{"src.txt": []byte("alpha\ncharlie\n")}, + wantTree: map[string][]byte{ + "src.txt": []byte("alpha\ncharlie\n"), + "dst.txt": []byte("alpha\nbravo\ncharlie\n"), + }, + }, + { + name: "mode change", + patch: []byte(`diff --git a/mode.go b/mode.go +index 1234567..89abcde 100755 +old mode 100644 +new mode 100755 +--- a/mode.go ++++ b/mode.go +`), + tree: 
map[string][]byte{"mode.go": []byte("package mode\n")}, + wantTree: map[string][]byte{"mode.go": []byte("package mode\n")}, + }, + } + + for _, test := range tests { + test := test + t.Run(test.name, func(t *testing.T) { + t.Parallel() + + original := cloneTestTree(test.tree) + applied, err := applyPatchset(test.tree, test.patch) + require.NoError(t, err) + assert.Equal(t, test.wantTree, applied) + assert.Equal(t, original, test.tree) + }) + } +} + +func TestPatchsetApply_AtomicOnFailure(t *testing.T) { + t.Parallel() + + renamePatch := []byte(`diff --git a/src.txt b/dst.txt +similarity index 100% +rename from src.txt +rename to dst.txt +--- a/src.txt ++++ b/dst.txt +@@ -1,2 +1,2 @@ +-alpha ++bravo + charlie +`) + deletePatch := mustReadFile(t, filepath.Join("testdata", "significant", "rm.diff")) + patchsetData := append(append([]byte{}, renamePatch...), deletePatch...) + + tree := map[string][]byte{ + "src.txt": []byte("alpha\ncharlie\n"), + "keep.txt": []byte("keep\n"), + } + original := cloneTestTree(tree) + + applied, err := applyPatchset(tree, patchsetData) + require.Error(t, err) + assert.Nil(t, applied) + assert.Equal(t, original, tree) + assert.Contains(t, err.Error(), "missing file") +} + +func TestPatchsetApply_SameFilenameSequentialDiffs(t *testing.T) { + t.Parallel() + + patchData := []byte(`diff --git a/same_fn b/same_fn +--- a/same_fn ++++ b/same_fn +@@ -1,13 +1,13 @@ + a + b + c +-d ++z + e + f + g + h + i + j + k + l + m +diff --git a/same_fn b/same_fn +--- a/same_fn ++++ b/same_fn +@@ -1,13 +1,13 @@ + a + b + c + z +-e ++y + f + g + h + i + j + k + l + m +`) + + tree := map[string][]byte{ + "same_fn": []byte("a\nb\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\n"), + } + + applied, err := applyPatchset(tree, patchData) + require.NoError(t, err) + assert.Equal(t, map[string][]byte{ + "same_fn": []byte("a\nb\nc\nz\ny\nf\ng\nh\ni\nj\nk\nl\nm\n"), + }, applied) +} + +func TestPatchsetApply_SameFilenameIndependentDiffs(t *testing.T) { + t.Parallel() + + patchData := 
[]byte(`diff --git a/same_fn b/same_fn +--- a/same_fn ++++ b/same_fn +@@ -1,13 +1,13 @@ + a + b + c +-d ++z + e + f + g + h + i + j + k + l + m +diff --git a/same_fn b/same_fn +--- a/same_fn ++++ b/same_fn +@@ -6,8 +6,8 @@ f + g + h +-i ++y + j + k + l + m +`) + + tree := map[string][]byte{ + "same_fn": []byte("a\nb\nc\nd\ne\nf\ng\nh\ni\nj\nk\nl\nm\n"), + } + + applied, err := applyPatchset(tree, patchData) + require.NoError(t, err) + assert.Equal(t, map[string][]byte{ + "same_fn": []byte("a\nb\nc\nz\ne\nf\ng\nh\ny\nj\nk\nl\nm\n"), + }, applied) +} + +func TestPatchsetApply_RejectsBinaryPatches(t *testing.T) { + t.Parallel() + + _, err := applyPatchset( + map[string][]byte{"favicon-16x16-light.png": []byte("binary")}, + mustReadFile(t, filepath.Join("testdata", "significant", "binary-delta.diff")), + ) + require.Error(t, err) + assert.Contains(t, err.Error(), "binary patches are not supported") + + var unsupportedErr *unsupportedPatchError + require.ErrorAs(t, err, &unsupportedErr) + assert.ErrorIs(t, err, errPatchBinary) +} + +func cloneTestTree(tree map[string][]byte) map[string][]byte { + out := make(map[string][]byte, len(tree)) + for path, content := range tree { + out[path] = append([]byte(nil), content...) 
+ } + return out +} diff --git a/testdata/apply/t4101/diff.0-1 b/testdata/apply/t4101/diff.0-1 new file mode 100644 index 0000000..1010a88 --- /dev/null +++ b/testdata/apply/t4101/diff.0-1 @@ -0,0 +1,6 @@ +--- a/frotz ++++ b/frotz +@@ -1,2 +1,3 @@ + a + b ++c diff --git a/testdata/apply/t4101/diff.0-2 b/testdata/apply/t4101/diff.0-2 new file mode 100644 index 0000000..36460a2 --- /dev/null +++ b/testdata/apply/t4101/diff.0-2 @@ -0,0 +1,7 @@ +--- a/frotz ++++ b/frotz +@@ -1,2 +1,2 @@ + a +-b ++b +\ No newline at end of file diff --git a/testdata/apply/t4101/diff.0-3 b/testdata/apply/t4101/diff.0-3 new file mode 100644 index 0000000..b281c43 --- /dev/null +++ b/testdata/apply/t4101/diff.0-3 @@ -0,0 +1,8 @@ +--- a/frotz ++++ b/frotz +@@ -1,2 +1,3 @@ + a +-b ++c ++b +\ No newline at end of file diff --git a/testdata/apply/t4101/diff.1-0 b/testdata/apply/t4101/diff.1-0 new file mode 100644 index 0000000..f0a2e92 --- /dev/null +++ b/testdata/apply/t4101/diff.1-0 @@ -0,0 +1,6 @@ +--- a/frotz ++++ b/frotz +@@ -1,3 +1,2 @@ + a + b +-c diff --git a/testdata/apply/t4101/diff.1-2 b/testdata/apply/t4101/diff.1-2 new file mode 100644 index 0000000..2a440a5 --- /dev/null +++ b/testdata/apply/t4101/diff.1-2 @@ -0,0 +1,8 @@ +--- a/frotz ++++ b/frotz +@@ -1,3 +1,2 @@ + a +-b +-c ++b +\ No newline at end of file diff --git a/testdata/apply/t4101/diff.1-3 b/testdata/apply/t4101/diff.1-3 new file mode 100644 index 0000000..61aff97 --- /dev/null +++ b/testdata/apply/t4101/diff.1-3 @@ -0,0 +1,8 @@ +--- a/frotz ++++ b/frotz +@@ -1,3 +1,3 @@ + a +-b + c ++b +\ No newline at end of file diff --git a/testdata/apply/t4101/diff.2-0 b/testdata/apply/t4101/diff.2-0 new file mode 100644 index 0000000..c2e71ee --- /dev/null +++ b/testdata/apply/t4101/diff.2-0 @@ -0,0 +1,7 @@ +--- a/frotz ++++ b/frotz +@@ -1,2 +1,2 @@ + a +-b +\ No newline at end of file ++b diff --git a/testdata/apply/t4101/diff.2-1 b/testdata/apply/t4101/diff.2-1 new file mode 100644 index 0000000..a66d9fd --- /dev/null +++ 
b/testdata/apply/t4101/diff.2-1 @@ -0,0 +1,8 @@ +--- a/frotz ++++ b/frotz +@@ -1,2 +1,3 @@ + a +-b +\ No newline at end of file ++b ++c diff --git a/testdata/apply/t4101/diff.2-3 b/testdata/apply/t4101/diff.2-3 new file mode 100644 index 0000000..5633c83 --- /dev/null +++ b/testdata/apply/t4101/diff.2-3 @@ -0,0 +1,7 @@ +--- a/frotz ++++ b/frotz +@@ -1,2 +1,3 @@ + a ++c + b +\ No newline at end of file diff --git a/testdata/apply/t4101/diff.3-0 b/testdata/apply/t4101/diff.3-0 new file mode 100644 index 0000000..10b1a41 --- /dev/null +++ b/testdata/apply/t4101/diff.3-0 @@ -0,0 +1,8 @@ +--- a/frotz ++++ b/frotz +@@ -1,3 +1,2 @@ + a +-c +-b +\ No newline at end of file ++b diff --git a/testdata/apply/t4101/diff.3-1 b/testdata/apply/t4101/diff.3-1 new file mode 100644 index 0000000..c799c60 --- /dev/null +++ b/testdata/apply/t4101/diff.3-1 @@ -0,0 +1,8 @@ +--- a/frotz ++++ b/frotz +@@ -1,3 +1,3 @@ + a ++b + c +-b +\ No newline at end of file diff --git a/testdata/apply/t4101/diff.3-2 b/testdata/apply/t4101/diff.3-2 new file mode 100644 index 0000000..f8d1ba6 --- /dev/null +++ b/testdata/apply/t4101/diff.3-2 @@ -0,0 +1,7 @@ +--- a/frotz ++++ b/frotz +@@ -1,3 +1,2 @@ + a +-c + b +\ No newline at end of file diff --git a/testdata/apply/text_fragment_add_end.out b/testdata/apply/text_fragment_add_end.out new file mode 100644 index 0000000..648fd44 --- /dev/null +++ b/testdata/apply/text_fragment_add_end.out @@ -0,0 +1,5 @@ +line 1 +line 2 +line 3 +new line a +new line b diff --git a/testdata/apply/text_fragment_add_end.patch b/testdata/apply/text_fragment_add_end.patch new file mode 100644 index 0000000..de708be --- /dev/null +++ b/testdata/apply/text_fragment_add_end.patch @@ -0,0 +1,9 @@ +diff --git a/gitdiff/testdata/apply/fragment_add_end.src b/gitdiff/testdata/apply/fragment_add_end.src +--- a/gitdiff/testdata/apply/fragment_add_end.src ++++ b/gitdiff/testdata/apply/fragment_add_end.src +@@ -1,3 +1,5 @@ + line 1 + line 2 + line 3 ++new line a ++new line b diff --git 
a/testdata/apply/text_fragment_add_end.src b/testdata/apply/text_fragment_add_end.src new file mode 100644 index 0000000..a92d664 --- /dev/null +++ b/testdata/apply/text_fragment_add_end.src @@ -0,0 +1,3 @@ +line 1 +line 2 +line 3 diff --git a/testdata/apply/text_fragment_add_end_noeol.out b/testdata/apply/text_fragment_add_end_noeol.out new file mode 100644 index 0000000..94c99a3 --- /dev/null +++ b/testdata/apply/text_fragment_add_end_noeol.out @@ -0,0 +1,5 @@ +line 1 +line 2 +line 3 +line 4 +line 5 diff --git a/testdata/apply/text_fragment_add_end_noeol.patch b/testdata/apply/text_fragment_add_end_noeol.patch new file mode 100644 index 0000000..ec3cea4 --- /dev/null +++ b/testdata/apply/text_fragment_add_end_noeol.patch @@ -0,0 +1,11 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_add_end_noeol.src b/gitdiff/testdata/apply/text_fragment_add_end_noeol.src +--- a/gitdiff/testdata/apply/text_fragment_add_end_noeol.src ++++ b/gitdiff/testdata/apply/text_fragment_add_end_noeol.src +@@ -1,3 +1,5 @@ + line 1 + line 2 +-line 3 +\ No newline at end of file ++line 3 ++line 4 ++line 5 diff --git a/testdata/apply/text_fragment_add_end_noeol.src b/testdata/apply/text_fragment_add_end_noeol.src new file mode 100644 index 0000000..8cf2f17 --- /dev/null +++ b/testdata/apply/text_fragment_add_end_noeol.src @@ -0,0 +1,3 @@ +line 1 +line 2 +line 3 \ No newline at end of file diff --git a/testdata/apply/text_fragment_add_middle.out b/testdata/apply/text_fragment_add_middle.out new file mode 100644 index 0000000..ded20d8 --- /dev/null +++ b/testdata/apply/text_fragment_add_middle.out @@ -0,0 +1,5 @@ +line 1 +line 2 +new line a +new line b +line 3 diff --git a/testdata/apply/text_fragment_add_middle.patch b/testdata/apply/text_fragment_add_middle.patch new file mode 100644 index 0000000..43aee3b --- /dev/null +++ b/testdata/apply/text_fragment_add_middle.patch @@ -0,0 +1,9 @@ +diff --git a/gitdiff/testdata/apply/fragment_add_middle.src 
b/gitdiff/testdata/apply/fragment_add_middle.src +--- a/gitdiff/testdata/apply/fragment_add_middle.src ++++ b/gitdiff/testdata/apply/fragment_add_middle.src +@@ -1,3 +1,5 @@ + line 1 + line 2 ++new line a ++new line b + line 3 diff --git a/testdata/apply/text_fragment_add_middle.src b/testdata/apply/text_fragment_add_middle.src new file mode 100644 index 0000000..a92d664 --- /dev/null +++ b/testdata/apply/text_fragment_add_middle.src @@ -0,0 +1,3 @@ +line 1 +line 2 +line 3 diff --git a/testdata/apply/text_fragment_add_start.out b/testdata/apply/text_fragment_add_start.out new file mode 100644 index 0000000..b153f60 --- /dev/null +++ b/testdata/apply/text_fragment_add_start.out @@ -0,0 +1,4 @@ +new line a +line 1 +line 2 +line 3 diff --git a/testdata/apply/text_fragment_add_start.patch b/testdata/apply/text_fragment_add_start.patch new file mode 100644 index 0000000..5218764 --- /dev/null +++ b/testdata/apply/text_fragment_add_start.patch @@ -0,0 +1,8 @@ +diff --git a/gitdiff/testdata/apply/fragment_add_start.src b/gitdiff/testdata/apply/fragment_add_start.src +--- a/gitdiff/testdata/apply/fragment_add_start.src ++++ b/gitdiff/testdata/apply/fragment_add_start.src +@@ -1,3 +1,4 @@ ++new line a + line 1 + line 2 + line 3 diff --git a/testdata/apply/text_fragment_add_start.src b/testdata/apply/text_fragment_add_start.src new file mode 100644 index 0000000..a92d664 --- /dev/null +++ b/testdata/apply/text_fragment_add_start.src @@ -0,0 +1,3 @@ +line 1 +line 2 +line 3 diff --git a/testdata/apply/text_fragment_change_end.out b/testdata/apply/text_fragment_change_end.out new file mode 100644 index 0000000..e3cbece --- /dev/null +++ b/testdata/apply/text_fragment_change_end.out @@ -0,0 +1,10 @@ +line 1 +line 2 +line 3 +line 4 +line 5 +line 6 +line 7 +line 8 +line 9 +new line a diff --git a/testdata/apply/text_fragment_change_end.patch b/testdata/apply/text_fragment_change_end.patch new file mode 100644 index 0000000..5655880 --- /dev/null +++ 
b/testdata/apply/text_fragment_change_end.patch @@ -0,0 +1,9 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_change_end.src b/gitdiff/testdata/apply/text_fragment_change_end.src +--- a/gitdiff/testdata/apply/text_fragment_change_end.src ++++ b/gitdiff/testdata/apply/text_fragment_change_end.src +@@ -7,4 +7,4 @@ line 6 + line 7 + line 8 + line 9 +-line 10 ++new line a diff --git a/testdata/apply/text_fragment_change_end.src b/testdata/apply/text_fragment_change_end.src new file mode 100644 index 0000000..fa2da6e --- /dev/null +++ b/testdata/apply/text_fragment_change_end.src @@ -0,0 +1,10 @@ +line 1 +line 2 +line 3 +line 4 +line 5 +line 6 +line 7 +line 8 +line 9 +line 10 diff --git a/testdata/apply/text_fragment_change_end_eol.out b/testdata/apply/text_fragment_change_end_eol.out new file mode 100644 index 0000000..8cf2f17 --- /dev/null +++ b/testdata/apply/text_fragment_change_end_eol.out @@ -0,0 +1,3 @@ +line 1 +line 2 +line 3 \ No newline at end of file diff --git a/testdata/apply/text_fragment_change_end_eol.patch b/testdata/apply/text_fragment_change_end_eol.patch new file mode 100644 index 0000000..f1c9477 --- /dev/null +++ b/testdata/apply/text_fragment_change_end_eol.patch @@ -0,0 +1,10 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_remove_last_eol.src b/gitdiff/testdata/apply/text_fragment_remove_last_eol.src +index a92d664..8cf2f17 100644 +--- a/gitdiff/testdata/apply/text_fragment_remove_last_eol.src ++++ b/gitdiff/testdata/apply/text_fragment_remove_last_eol.src +@@ -1,3 +1,3 @@ + line 1 + line 2 +-line 3 ++line 3 +\ No newline at end of file diff --git a/testdata/apply/text_fragment_change_end_eol.src b/testdata/apply/text_fragment_change_end_eol.src new file mode 100644 index 0000000..a92d664 --- /dev/null +++ b/testdata/apply/text_fragment_change_end_eol.src @@ -0,0 +1,3 @@ +line 1 +line 2 +line 3 diff --git a/testdata/apply/text_fragment_change_exact.out b/testdata/apply/text_fragment_change_exact.out new file mode 100644 index 
0000000..4655a0a --- /dev/null +++ b/testdata/apply/text_fragment_change_exact.out @@ -0,0 +1,19 @@ +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +new line a +line +line +line diff --git a/testdata/apply/text_fragment_change_exact.patch b/testdata/apply/text_fragment_change_exact.patch new file mode 100644 index 0000000..395de4d --- /dev/null +++ b/testdata/apply/text_fragment_change_exact.patch @@ -0,0 +1,12 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_change_exact.src b/gitdiff/testdata/apply/text_fragment_change_exact.src +--- a/gitdiff/testdata/apply/text_fragment_change_exact.src ++++ b/gitdiff/testdata/apply/text_fragment_change_exact.src +@@ -13,7 +13,7 @@ line + line + line + line +-line ++new line a + line + line + line diff --git a/testdata/apply/text_fragment_change_exact.src b/testdata/apply/text_fragment_change_exact.src new file mode 100644 index 0000000..316a8f0 --- /dev/null +++ b/testdata/apply/text_fragment_change_exact.src @@ -0,0 +1,30 @@ +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line +line diff --git a/testdata/apply/text_fragment_change_middle.out b/testdata/apply/text_fragment_change_middle.out new file mode 100644 index 0000000..fd0a9ad --- /dev/null +++ b/testdata/apply/text_fragment_change_middle.out @@ -0,0 +1,9 @@ +line 1 +line 2 +line 3 +line 4 +line 5 +new line a +line 7 +line 8 +line 9 diff --git a/testdata/apply/text_fragment_change_middle.patch b/testdata/apply/text_fragment_change_middle.patch new file mode 100644 index 0000000..139a0fe --- /dev/null +++ b/testdata/apply/text_fragment_change_middle.patch @@ -0,0 +1,12 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_change_middle.src b/gitdiff/testdata/apply/text_fragment_change_middle.src +--- a/gitdiff/testdata/apply/text_fragment_change_middle.src ++++ 
b/gitdiff/testdata/apply/text_fragment_change_middle.src +@@ -3,7 +3,7 @@ line 2 + line 3 + line 4 + line 5 +-line 6 ++new line a + line 7 + line 8 + line 9 diff --git a/testdata/apply/text_fragment_change_middle.src b/testdata/apply/text_fragment_change_middle.src new file mode 100644 index 0000000..fa2da6e --- /dev/null +++ b/testdata/apply/text_fragment_change_middle.src @@ -0,0 +1,10 @@ +line 1 +line 2 +line 3 +line 4 +line 5 +line 6 +line 7 +line 8 +line 9 +line 10 diff --git a/testdata/apply/text_fragment_change_single_noeol.out b/testdata/apply/text_fragment_change_single_noeol.out new file mode 100644 index 0000000..ed59e08 --- /dev/null +++ b/testdata/apply/text_fragment_change_single_noeol.out @@ -0,0 +1 @@ +new line a \ No newline at end of file diff --git a/testdata/apply/text_fragment_change_single_noeol.patch b/testdata/apply/text_fragment_change_single_noeol.patch new file mode 100644 index 0000000..f945234 --- /dev/null +++ b/testdata/apply/text_fragment_change_single_noeol.patch @@ -0,0 +1,8 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_change_single_noeol.src b/gitdiff/testdata/apply/text_fragment_change_single_noeol.src +--- a/gitdiff/testdata/apply/text_fragment_change_single_noeol.src ++++ b/gitdiff/testdata/apply/text_fragment_change_single_noeol.src +@@ -1 +1 @@ +-line 1 +\ No newline at end of file ++new line a +\ No newline at end of file diff --git a/testdata/apply/text_fragment_change_single_noeol.src b/testdata/apply/text_fragment_change_single_noeol.src new file mode 100644 index 0000000..dcf168c --- /dev/null +++ b/testdata/apply/text_fragment_change_single_noeol.src @@ -0,0 +1 @@ +line 1 \ No newline at end of file diff --git a/testdata/apply/text_fragment_change_start.out b/testdata/apply/text_fragment_change_start.out new file mode 100644 index 0000000..5156941 --- /dev/null +++ b/testdata/apply/text_fragment_change_start.out @@ -0,0 +1,4 @@ +new line a +line 2 +line 3 +line 4 diff --git 
a/testdata/apply/text_fragment_change_start.patch b/testdata/apply/text_fragment_change_start.patch new file mode 100644 index 0000000..d0a6653 --- /dev/null +++ b/testdata/apply/text_fragment_change_start.patch @@ -0,0 +1,9 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_change_start.src b/gitdiff/testdata/apply/text_fragment_change_start.src +--- a/gitdiff/testdata/apply/text_fragment_change_start.src ++++ b/gitdiff/testdata/apply/text_fragment_change_start.src +@@ -1,4 +1,4 @@ +-line 1 ++new line a + line 2 + line 3 + line 4 diff --git a/testdata/apply/text_fragment_change_start.src b/testdata/apply/text_fragment_change_start.src new file mode 100644 index 0000000..fa2da6e --- /dev/null +++ b/testdata/apply/text_fragment_change_start.src @@ -0,0 +1,10 @@ +line 1 +line 2 +line 3 +line 4 +line 5 +line 6 +line 7 +line 8 +line 9 +line 10 diff --git a/testdata/apply/text_fragment_delete_all.out b/testdata/apply/text_fragment_delete_all.out new file mode 100644 index 0000000..e69de29 diff --git a/testdata/apply/text_fragment_delete_all.patch b/testdata/apply/text_fragment_delete_all.patch new file mode 100644 index 0000000..8a2fb9c --- /dev/null +++ b/testdata/apply/text_fragment_delete_all.patch @@ -0,0 +1,8 @@ +diff --git a/gitdiff/testdata/apply/fragment_delete_all.src b/gitdiff/testdata/apply/fragment_delete_all.src +--- a/gitdiff/testdata/apply/fragment_delete_all.src ++++ b/gitdiff/testdata/apply/fragment_delete_all.src +@@ -1,4 +0,0 @@ +-line a +-line b +-line c +-line d diff --git a/testdata/apply/text_fragment_delete_all.src b/testdata/apply/text_fragment_delete_all.src new file mode 100644 index 0000000..47d03ac --- /dev/null +++ b/testdata/apply/text_fragment_delete_all.src @@ -0,0 +1,4 @@ +line a +line b +line c +line d diff --git a/testdata/apply/text_fragment_error.src b/testdata/apply/text_fragment_error.src new file mode 100644 index 0000000..f8b6f0a --- /dev/null +++ b/testdata/apply/text_fragment_error.src @@ -0,0 +1,13 @@ +line 1 +line 2 +line 
3 +line 4 +line 5 +line 6 +line 7 +line 8 +line 9 +line 10 +line 11 +line 12 +line 13 diff --git a/testdata/apply/text_fragment_error_context_conflict.patch b/testdata/apply/text_fragment_error_context_conflict.patch new file mode 100644 index 0000000..a262796 --- /dev/null +++ b/testdata/apply/text_fragment_error_context_conflict.patch @@ -0,0 +1,12 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_error.src b/gitdiff/testdata/apply/text_fragment_error.src +--- a/gitdiff/testdata/apply/text_fragment_error.src ++++ b/gitdiff/testdata/apply/text_fragment_error.src +@@ -4,7 +4,7 @@ line 3 + line 4 + line 5 + line conflict +-line 7 ++new line a + line 8 + line 9 + line 10 diff --git a/testdata/apply/text_fragment_error_delete_conflict.patch b/testdata/apply/text_fragment_error_delete_conflict.patch new file mode 100644 index 0000000..17ea166 --- /dev/null +++ b/testdata/apply/text_fragment_error_delete_conflict.patch @@ -0,0 +1,12 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_error.src b/gitdiff/testdata/apply/text_fragment_error.src +--- a/gitdiff/testdata/apply/text_fragment_error.src ++++ b/gitdiff/testdata/apply/text_fragment_error.src +@@ -4,7 +4,7 @@ line 3 + line 4 + line 5 + line 6 +-line conflict ++new line a + line 8 + line 9 + line 10 diff --git a/testdata/apply/text_fragment_error_new_file.patch b/testdata/apply/text_fragment_error_new_file.patch new file mode 100644 index 0000000..f4fbee6 --- /dev/null +++ b/testdata/apply/text_fragment_error_new_file.patch @@ -0,0 +1,7 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_error.src b/gitdiff/testdata/apply/text_fragment_error.src +--- a/gitdiff/testdata/apply/text_fragment_error.src ++++ b/gitdiff/testdata/apply/text_fragment_error.src +@@ -0,0 +1,3 @@ ++line 1 ++line 2 ++line 3 diff --git a/testdata/apply/text_fragment_error_short_src.patch b/testdata/apply/text_fragment_error_short_src.patch new file mode 100644 index 0000000..bfe7b96 --- /dev/null +++ 
b/testdata/apply/text_fragment_error_short_src.patch @@ -0,0 +1,12 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_error.src b/gitdiff/testdata/apply/text_fragment_error.src +--- a/gitdiff/testdata/apply/text_fragment_error.src ++++ b/gitdiff/testdata/apply/text_fragment_error.src +@@ -9,7 +9,7 @@ line 8 + line 9 + line 10 + line 11 +-line 12 ++new line a + line 13 + line 14 + line 15 diff --git a/testdata/apply/text_fragment_error_short_src_before.patch b/testdata/apply/text_fragment_error_short_src_before.patch new file mode 100644 index 0000000..0a96018 --- /dev/null +++ b/testdata/apply/text_fragment_error_short_src_before.patch @@ -0,0 +1,12 @@ +diff --git a/gitdiff/testdata/apply/text_fragment_error.src b/gitdiff/testdata/apply/text_fragment_error.src +--- a/gitdiff/testdata/apply/text_fragment_error.src ++++ b/gitdiff/testdata/apply/text_fragment_error.src +@@ -15,7 +15,7 @@ line 14 + line 15 + line 16 + line 17 +-line 18 ++new line a + line 19 + line 20 + line 21 diff --git a/testdata/apply/text_fragment_new.out b/testdata/apply/text_fragment_new.out new file mode 100644 index 0000000..a92d664 --- /dev/null +++ b/testdata/apply/text_fragment_new.out @@ -0,0 +1,3 @@ +line 1 +line 2 +line 3 diff --git a/testdata/apply/text_fragment_new.patch b/testdata/apply/text_fragment_new.patch new file mode 100644 index 0000000..c87487b --- /dev/null +++ b/testdata/apply/text_fragment_new.patch @@ -0,0 +1,7 @@ +diff --git a/gitdiff/testdata/apply/fragment_new.src b/gitdiff/testdata/apply/fragment_new.src +--- a/gitdiff/testdata/apply/fragment_new.src ++++ b/gitdiff/testdata/apply/fragment_new.src +@@ -0,0 +1,3 @@ ++line 1 ++line 2 ++line 3 diff --git a/testdata/apply/text_fragment_new.src b/testdata/apply/text_fragment_new.src new file mode 100644 index 0000000..e69de29 diff --git a/testdata/parity/anchor-beginning/fixture.json b/testdata/parity/anchor-beginning/fixture.json new file mode 100644 index 0000000..bc769ad --- /dev/null +++ 
b/testdata/parity/anchor-beginning/fixture.json @@ -0,0 +1,3 @@ +{ + "gitArgs": [] +} diff --git a/testdata/parity/anchor-beginning/out b/testdata/parity/anchor-beginning/out new file mode 100644 index 0000000..3d4991a --- /dev/null +++ b/testdata/parity/anchor-beginning/out @@ -0,0 +1,3 @@ +ALPHA +beta +gamma diff --git a/testdata/parity/anchor-beginning/patch b/testdata/parity/anchor-beginning/patch new file mode 100644 index 0000000..8640581 --- /dev/null +++ b/testdata/parity/anchor-beginning/patch @@ -0,0 +1,8 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,3 +1,3 @@ +-alpha ++ALPHA + beta + gamma diff --git a/testdata/parity/anchor-beginning/src b/testdata/parity/anchor-beginning/src new file mode 100644 index 0000000..85c3040 --- /dev/null +++ b/testdata/parity/anchor-beginning/src @@ -0,0 +1,3 @@ +alpha +beta +gamma diff --git a/testdata/parity/anchor-end/fixture.json b/testdata/parity/anchor-end/fixture.json new file mode 100644 index 0000000..bc769ad --- /dev/null +++ b/testdata/parity/anchor-end/fixture.json @@ -0,0 +1,3 @@ +{ + "gitArgs": [] +} diff --git a/testdata/parity/anchor-end/out b/testdata/parity/anchor-end/out new file mode 100644 index 0000000..7a28df3 --- /dev/null +++ b/testdata/parity/anchor-end/out @@ -0,0 +1,4 @@ +alpha +beta +gamma +delta diff --git a/testdata/parity/anchor-end/patch b/testdata/parity/anchor-end/patch new file mode 100644 index 0000000..fb9a282 --- /dev/null +++ b/testdata/parity/anchor-end/patch @@ -0,0 +1,8 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,3 +1,4 @@ + alpha + beta + gamma ++delta diff --git a/testdata/parity/anchor-end/src b/testdata/parity/anchor-end/src new file mode 100644 index 0000000..85c3040 --- /dev/null +++ b/testdata/parity/anchor-end/src @@ -0,0 +1,3 @@ +alpha +beta +gamma diff --git a/testdata/parity/atomic-failure-vs-reject/fixture.json b/testdata/parity/atomic-failure-vs-reject/fixture.json new file mode 100644 index 0000000..d5f3df1 
--- /dev/null +++ b/testdata/parity/atomic-failure-vs-reject/fixture.json @@ -0,0 +1,5 @@ +{ + "gitArgs": [], + "expectConflict": true, + "checkReject": true +} diff --git a/testdata/parity/atomic-failure-vs-reject/out b/testdata/parity/atomic-failure-vs-reject/out new file mode 100644 index 0000000..f5607bb --- /dev/null +++ b/testdata/parity/atomic-failure-vs-reject/out @@ -0,0 +1,6 @@ +line1 +LINE2 +line3 +line4 +LINE5-DRIFT +line6 diff --git a/testdata/parity/atomic-failure-vs-reject/patch b/testdata/parity/atomic-failure-vs-reject/patch new file mode 100644 index 0000000..6d12d79 --- /dev/null +++ b/testdata/parity/atomic-failure-vs-reject/patch @@ -0,0 +1,13 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,3 +1,3 @@ + line1 +-line2 ++LINE2 + line3 +@@ -4,3 +4,3 @@ + line4 +-line5 ++LINE5 + line6 diff --git a/testdata/parity/atomic-failure-vs-reject/rej b/testdata/parity/atomic-failure-vs-reject/rej new file mode 100644 index 0000000..0a61237 --- /dev/null +++ b/testdata/parity/atomic-failure-vs-reject/rej @@ -0,0 +1,5 @@ +@@ -4,3 +4,3 @@ + line4 +-line5 ++LINE5 + line6 diff --git a/testdata/parity/atomic-failure-vs-reject/src b/testdata/parity/atomic-failure-vs-reject/src new file mode 100644 index 0000000..01a6b8d --- /dev/null +++ b/testdata/parity/atomic-failure-vs-reject/src @@ -0,0 +1,6 @@ +line1 +line2 +line3 +line4 +LINE5-DRIFT +line6 diff --git a/testdata/parity/context-reduced-beginning-anchor-clear/fixture.json b/testdata/parity/context-reduced-beginning-anchor-clear/fixture.json new file mode 100644 index 0000000..602dcdf --- /dev/null +++ b/testdata/parity/context-reduced-beginning-anchor-clear/fixture.json @@ -0,0 +1,5 @@ +{ + "gitArgs": [ + "-C1" + ] +} diff --git a/testdata/parity/context-reduced-beginning-anchor-clear/out b/testdata/parity/context-reduced-beginning-anchor-clear/out new file mode 100644 index 0000000..2ee876f --- /dev/null +++ b/testdata/parity/context-reduced-beginning-anchor-clear/out @@ -0,0 +1,4 
@@ +banner +ALPHA +beta +gamma diff --git a/testdata/parity/context-reduced-beginning-anchor-clear/patch b/testdata/parity/context-reduced-beginning-anchor-clear/patch new file mode 100644 index 0000000..8640581 --- /dev/null +++ b/testdata/parity/context-reduced-beginning-anchor-clear/patch @@ -0,0 +1,8 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,3 +1,3 @@ +-alpha ++ALPHA + beta + gamma diff --git a/testdata/parity/context-reduced-beginning-anchor-clear/src b/testdata/parity/context-reduced-beginning-anchor-clear/src new file mode 100644 index 0000000..d9bdb0e --- /dev/null +++ b/testdata/parity/context-reduced-beginning-anchor-clear/src @@ -0,0 +1,4 @@ +banner +alpha +beta +gamma diff --git a/testdata/parity/context-reduced-both/fixture.json b/testdata/parity/context-reduced-both/fixture.json new file mode 100644 index 0000000..602dcdf --- /dev/null +++ b/testdata/parity/context-reduced-both/fixture.json @@ -0,0 +1,5 @@ +{ + "gitArgs": [ + "-C1" + ] +} diff --git a/testdata/parity/context-reduced-both/out b/testdata/parity/context-reduced-both/out new file mode 100644 index 0000000..de4be80 --- /dev/null +++ b/testdata/parity/context-reduced-both/out @@ -0,0 +1,7 @@ +a0 +A1 +a2 +A3 +a4 +A5 +a6 diff --git a/testdata/parity/context-reduced-both/patch b/testdata/parity/context-reduced-both/patch new file mode 100644 index 0000000..022366b --- /dev/null +++ b/testdata/parity/context-reduced-both/patch @@ -0,0 +1,10 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -2,5 +2,5 @@ + a1 + a2 +-a3 ++A3 + a4 + a5 diff --git a/testdata/parity/context-reduced-both/src b/testdata/parity/context-reduced-both/src new file mode 100644 index 0000000..483bdf4 --- /dev/null +++ b/testdata/parity/context-reduced-both/src @@ -0,0 +1,7 @@ +a0 +A1 +a2 +a3 +a4 +A5 +a6 diff --git a/testdata/parity/context-reduced-end-anchor-clear/fixture.json b/testdata/parity/context-reduced-end-anchor-clear/fixture.json new file mode 100644 index 
0000000..602dcdf --- /dev/null +++ b/testdata/parity/context-reduced-end-anchor-clear/fixture.json @@ -0,0 +1,5 @@ +{ + "gitArgs": [ + "-C1" + ] +} diff --git a/testdata/parity/context-reduced-end-anchor-clear/out b/testdata/parity/context-reduced-end-anchor-clear/out new file mode 100644 index 0000000..843507b --- /dev/null +++ b/testdata/parity/context-reduced-end-anchor-clear/out @@ -0,0 +1,4 @@ +alpha +beta +GAMMA +footer diff --git a/testdata/parity/context-reduced-end-anchor-clear/patch b/testdata/parity/context-reduced-end-anchor-clear/patch new file mode 100644 index 0000000..9f93935 --- /dev/null +++ b/testdata/parity/context-reduced-end-anchor-clear/patch @@ -0,0 +1,8 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,3 +1,3 @@ + alpha + beta +-gamma ++GAMMA diff --git a/testdata/parity/context-reduced-end-anchor-clear/src b/testdata/parity/context-reduced-end-anchor-clear/src new file mode 100644 index 0000000..9789145 --- /dev/null +++ b/testdata/parity/context-reduced-end-anchor-clear/src @@ -0,0 +1,4 @@ +alpha +beta +gamma +footer diff --git a/testdata/parity/context-reduced-leading/fixture.json b/testdata/parity/context-reduced-leading/fixture.json new file mode 100644 index 0000000..602dcdf --- /dev/null +++ b/testdata/parity/context-reduced-leading/fixture.json @@ -0,0 +1,5 @@ +{ + "gitArgs": [ + "-C1" + ] +} diff --git a/testdata/parity/context-reduced-leading/out b/testdata/parity/context-reduced-leading/out new file mode 100644 index 0000000..e452713 --- /dev/null +++ b/testdata/parity/context-reduced-leading/out @@ -0,0 +1,7 @@ +a0 +A1 +a2 +A3 +a4 +a5 +a6 diff --git a/testdata/parity/context-reduced-leading/patch b/testdata/parity/context-reduced-leading/patch new file mode 100644 index 0000000..022366b --- /dev/null +++ b/testdata/parity/context-reduced-leading/patch @@ -0,0 +1,10 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -2,5 +2,5 @@ + a1 + a2 +-a3 ++A3 + a4 + a5 diff --git 
a/testdata/parity/context-reduced-leading/src b/testdata/parity/context-reduced-leading/src new file mode 100644 index 0000000..82da92e --- /dev/null +++ b/testdata/parity/context-reduced-leading/src @@ -0,0 +1,7 @@ +a0 +A1 +a2 +a3 +a4 +a5 +a6 diff --git a/testdata/parity/context-reduced-trailing/fixture.json b/testdata/parity/context-reduced-trailing/fixture.json new file mode 100644 index 0000000..602dcdf --- /dev/null +++ b/testdata/parity/context-reduced-trailing/fixture.json @@ -0,0 +1,5 @@ +{ + "gitArgs": [ + "-C1" + ] +} diff --git a/testdata/parity/context-reduced-trailing/out b/testdata/parity/context-reduced-trailing/out new file mode 100644 index 0000000..5d1a833 --- /dev/null +++ b/testdata/parity/context-reduced-trailing/out @@ -0,0 +1,7 @@ +a0 +a1 +a2 +A3 +a4 +A5 +a6 diff --git a/testdata/parity/context-reduced-trailing/patch b/testdata/parity/context-reduced-trailing/patch new file mode 100644 index 0000000..022366b --- /dev/null +++ b/testdata/parity/context-reduced-trailing/patch @@ -0,0 +1,10 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -2,5 +2,5 @@ + a1 + a2 +-a3 ++A3 + a4 + a5 diff --git a/testdata/parity/context-reduced-trailing/src b/testdata/parity/context-reduced-trailing/src new file mode 100644 index 0000000..7ff9f6e --- /dev/null +++ b/testdata/parity/context-reduced-trailing/src @@ -0,0 +1,7 @@ +a0 +a1 +a2 +a3 +a4 +A5 +a6 diff --git a/testdata/parity/copy-text/fixture.json b/testdata/parity/copy-text/fixture.json new file mode 100644 index 0000000..f994d06 --- /dev/null +++ b/testdata/parity/copy-text/fixture.json @@ -0,0 +1,10 @@ +{ + "skipLibrary": true, + "srcFiles": { + "source.txt": "alpha\nbravo\n" + }, + "outFiles": { + "copy.txt": "alpha\nbravo\n", + "source.txt": "alpha\nbravo\n" + } +} diff --git a/testdata/parity/copy-text/patch b/testdata/parity/copy-text/patch new file mode 100644 index 0000000..9310f13 --- /dev/null +++ b/testdata/parity/copy-text/patch @@ -0,0 +1,4 @@ +diff --git a/source.txt 
b/copy.txt +similarity index 100% +copy from source.txt +copy to copy.txt diff --git a/testdata/parity/create-text/fixture.json b/testdata/parity/create-text/fixture.json new file mode 100644 index 0000000..4f1864d --- /dev/null +++ b/testdata/parity/create-text/fixture.json @@ -0,0 +1,7 @@ +{ + "skipLibrary": true, + "srcFiles": {}, + "outFiles": { + "new.txt": "alpha\nbravo\n" + } +} diff --git a/testdata/parity/create-text/patch b/testdata/parity/create-text/patch new file mode 100644 index 0000000..e7ab742 --- /dev/null +++ b/testdata/parity/create-text/patch @@ -0,0 +1,8 @@ +diff --git a/new.txt b/new.txt +new file mode 100644 +index 0000000..0000000 +--- /dev/null ++++ b/new.txt +@@ -0,0 +1,2 @@ ++alpha ++bravo diff --git a/testdata/parity/delete-text/fixture.json b/testdata/parity/delete-text/fixture.json new file mode 100644 index 0000000..21084bd --- /dev/null +++ b/testdata/parity/delete-text/fixture.json @@ -0,0 +1,7 @@ +{ + "skipLibrary": true, + "srcFiles": { + "gone.txt": "alpha\nbravo\n" + }, + "outFiles": {} +} diff --git a/testdata/parity/delete-text/patch b/testdata/parity/delete-text/patch new file mode 100644 index 0000000..4725013 --- /dev/null +++ b/testdata/parity/delete-text/patch @@ -0,0 +1,8 @@ +diff --git a/gone.txt b/gone.txt +deleted file mode 100644 +index 0000000..0000000 +--- a/gone.txt ++++ /dev/null +@@ -1,2 +0,0 @@ +-alpha +-bravo diff --git a/testdata/parity/mode-change/fixture.json b/testdata/parity/mode-change/fixture.json new file mode 100644 index 0000000..9f2933e --- /dev/null +++ b/testdata/parity/mode-change/fixture.json @@ -0,0 +1,15 @@ +{ + "skipLibrary": true, + "srcFiles": { + "script.sh": "echo hi\n" + }, + "srcModes": { + "script.sh": "100644" + }, + "outFiles": { + "script.sh": "echo hi\n" + }, + "outModes": { + "script.sh": "100755" + } +} diff --git a/testdata/parity/mode-change/patch b/testdata/parity/mode-change/patch new file mode 100644 index 0000000..addf924 --- /dev/null +++ 
b/testdata/parity/mode-change/patch @@ -0,0 +1,3 @@ +diff --git a/script.sh b/script.sh +old mode 100644 +new mode 100755 diff --git a/testdata/parity/rename-text/fixture.json b/testdata/parity/rename-text/fixture.json new file mode 100644 index 0000000..2391f18 --- /dev/null +++ b/testdata/parity/rename-text/fixture.json @@ -0,0 +1,9 @@ +{ + "skipLibrary": true, + "srcFiles": { + "old.txt": "alpha\nbravo\n" + }, + "outFiles": { + "new.txt": "alpha\nbravo\n" + } +} diff --git a/testdata/parity/rename-text/patch b/testdata/parity/rename-text/patch new file mode 100644 index 0000000..45cf9cd --- /dev/null +++ b/testdata/parity/rename-text/patch @@ -0,0 +1,4 @@ +diff --git a/old.txt b/new.txt +similarity index 100% +rename from old.txt +rename to new.txt diff --git a/testdata/parity/reverse-apply-text/fixture.json b/testdata/parity/reverse-apply-text/fixture.json new file mode 100644 index 0000000..ff6ab4b --- /dev/null +++ b/testdata/parity/reverse-apply-text/fixture.json @@ -0,0 +1,12 @@ +{ + "skipLibrary": true, + "gitArgs": [ + "--reverse" + ], + "srcFiles": { + "file.txt": "z\nb\n" + }, + "outFiles": { + "file.txt": "a\nb\n" + } +} diff --git a/testdata/parity/reverse-apply-text/patch b/testdata/parity/reverse-apply-text/patch new file mode 100644 index 0000000..9f45e3f --- /dev/null +++ b/testdata/parity/reverse-apply-text/patch @@ -0,0 +1,7 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,2 +1,2 @@ +-a ++z + b diff --git a/testdata/parity/reverse-apply/fixture.json b/testdata/parity/reverse-apply/fixture.json new file mode 100644 index 0000000..bc769ad --- /dev/null +++ b/testdata/parity/reverse-apply/fixture.json @@ -0,0 +1,3 @@ +{ + "gitArgs": [] +} diff --git a/testdata/parity/reverse-apply/out b/testdata/parity/reverse-apply/out new file mode 100644 index 0000000..85c3040 --- /dev/null +++ b/testdata/parity/reverse-apply/out @@ -0,0 +1,3 @@ +alpha +beta +gamma diff --git a/testdata/parity/reverse-apply/patch 
b/testdata/parity/reverse-apply/patch new file mode 100644 index 0000000..3f1a01e --- /dev/null +++ b/testdata/parity/reverse-apply/patch @@ -0,0 +1,8 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,3 +1,3 @@ + alpha +-BETA ++beta + gamma diff --git a/testdata/parity/reverse-apply/src b/testdata/parity/reverse-apply/src new file mode 100644 index 0000000..e50310a --- /dev/null +++ b/testdata/parity/reverse-apply/src @@ -0,0 +1,3 @@ +alpha +BETA +gamma diff --git a/testdata/parity/reverse-option/fixture.json b/testdata/parity/reverse-option/fixture.json new file mode 100644 index 0000000..1836a72 --- /dev/null +++ b/testdata/parity/reverse-option/fixture.json @@ -0,0 +1,6 @@ +{ + "skipLibrary": true, + "gitArgs": [ + "--reverse" + ] +} diff --git a/testdata/parity/reverse-option/out b/testdata/parity/reverse-option/out new file mode 100644 index 0000000..422c2b7 --- /dev/null +++ b/testdata/parity/reverse-option/out @@ -0,0 +1,2 @@ +a +b diff --git a/testdata/parity/reverse-option/patch b/testdata/parity/reverse-option/patch new file mode 100644 index 0000000..9f45e3f --- /dev/null +++ b/testdata/parity/reverse-option/patch @@ -0,0 +1,7 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,2 +1,2 @@ +-a ++z + b diff --git a/testdata/parity/reverse-option/src b/testdata/parity/reverse-option/src new file mode 100644 index 0000000..10f80e6 --- /dev/null +++ b/testdata/parity/reverse-option/src @@ -0,0 +1,2 @@ +z +b diff --git a/testdata/parity/shrink-already-applied-reject/fixture.json b/testdata/parity/shrink-already-applied-reject/fixture.json new file mode 100644 index 0000000..6f909a4 --- /dev/null +++ b/testdata/parity/shrink-already-applied-reject/fixture.json @@ -0,0 +1,10 @@ +{ + "skipLibrary": true, + "expectGitError": true, + "srcFiles": { + "file.txt": "alpha\ngamma\n" + }, + "outFiles": { + "file.txt": "alpha\ngamma\n" + } +} diff --git a/testdata/parity/shrink-already-applied-reject/patch 
b/testdata/parity/shrink-already-applied-reject/patch new file mode 100644 index 0000000..1e265ed --- /dev/null +++ b/testdata/parity/shrink-already-applied-reject/patch @@ -0,0 +1,7 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,3 +1,2 @@ + alpha +-beta + gamma diff --git a/testdata/parity/unidiff-zero/fixture.json b/testdata/parity/unidiff-zero/fixture.json new file mode 100644 index 0000000..0a8462f --- /dev/null +++ b/testdata/parity/unidiff-zero/fixture.json @@ -0,0 +1,6 @@ +{ + "skipLibrary": true, + "gitArgs": [ + "--unidiff-zero" + ] +} diff --git a/testdata/parity/unidiff-zero/out b/testdata/parity/unidiff-zero/out new file mode 100644 index 0000000..b7ab80e --- /dev/null +++ b/testdata/parity/unidiff-zero/out @@ -0,0 +1,3 @@ +alpha +BRAVO +gamma diff --git a/testdata/parity/unidiff-zero/patch b/testdata/parity/unidiff-zero/patch new file mode 100644 index 0000000..1a12d44 --- /dev/null +++ b/testdata/parity/unidiff-zero/patch @@ -0,0 +1,6 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -2 +2 @@ +-beta ++BRAVO diff --git a/testdata/parity/unidiff-zero/src b/testdata/parity/unidiff-zero/src new file mode 100644 index 0000000..85c3040 --- /dev/null +++ b/testdata/parity/unidiff-zero/src @@ -0,0 +1,3 @@ +alpha +beta +gamma diff --git a/testdata/parity/whitespace-ignore-context-drift/fixture.json b/testdata/parity/whitespace-ignore-context-drift/fixture.json new file mode 100644 index 0000000..61e3af6 --- /dev/null +++ b/testdata/parity/whitespace-ignore-context-drift/fixture.json @@ -0,0 +1,7 @@ +{ + "gitArgs": [ + "--ignore-whitespace" + ], + "skipLibrary": true, + "ignoreWhitespace": true +} diff --git a/testdata/parity/whitespace-ignore-context-drift/out b/testdata/parity/whitespace-ignore-context-drift/out new file mode 100644 index 0000000..bbca5ca --- /dev/null +++ b/testdata/parity/whitespace-ignore-context-drift/out @@ -0,0 +1,3 @@ +alpha + BETA +charlie diff --git 
a/testdata/parity/whitespace-ignore-context-drift/patch b/testdata/parity/whitespace-ignore-context-drift/patch new file mode 100644 index 0000000..815eb76 --- /dev/null +++ b/testdata/parity/whitespace-ignore-context-drift/patch @@ -0,0 +1,8 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -1,3 +1,3 @@ + alpha +- beta ++ BETA + charlie diff --git a/testdata/parity/whitespace-ignore-context-drift/src b/testdata/parity/whitespace-ignore-context-drift/src new file mode 100644 index 0000000..601717a --- /dev/null +++ b/testdata/parity/whitespace-ignore-context-drift/src @@ -0,0 +1,3 @@ +alpha + beta +charlie diff --git a/testdata/parity/zero-context-delete/fixture.json b/testdata/parity/zero-context-delete/fixture.json new file mode 100644 index 0000000..0a8462f --- /dev/null +++ b/testdata/parity/zero-context-delete/fixture.json @@ -0,0 +1,6 @@ +{ + "skipLibrary": true, + "gitArgs": [ + "--unidiff-zero" + ] +} diff --git a/testdata/parity/zero-context-delete/out b/testdata/parity/zero-context-delete/out new file mode 100644 index 0000000..7819bf7 --- /dev/null +++ b/testdata/parity/zero-context-delete/out @@ -0,0 +1,2 @@ +alpha +gamma diff --git a/testdata/parity/zero-context-delete/patch b/testdata/parity/zero-context-delete/patch new file mode 100644 index 0000000..17df9b1 --- /dev/null +++ b/testdata/parity/zero-context-delete/patch @@ -0,0 +1,5 @@ +diff --git a/file.txt b/file.txt +--- a/file.txt ++++ b/file.txt +@@ -2 +1,0 @@ +-beta diff --git a/testdata/parity/zero-context-delete/src b/testdata/parity/zero-context-delete/src new file mode 100644 index 0000000..85c3040 --- /dev/null +++ b/testdata/parity/zero-context-delete/src @@ -0,0 +1,3 @@ +alpha +beta +gamma