effect(patch,tool): migrate patch/index and tool/read to AppFileSystem
The patch apply path now returns Effect<...> values backed by AppFileSystem.Service. The parser, chunk replacer, and shell-argv parser remain pure helpers.

deriveNewContentsFromChunks now takes the original text instead of doing its own readFileSync, so callers (the apply_patch tool and applyHunksToFiles) can supply content they already have.

tool/read.ts swaps its fs.createReadStream + readline helper for a Stream pipeline over AppFileSystem.Service.stream. A manual TextDecoder plus Stream.map keeps the trailing unterminated line that Stream.decodeText and Stream.runForEachWhile both drop today. Limit / offset / byte-cap behaviour is preserved.
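
For context, a minimal sketch of the new call shape for the patch path described above. This is not code from the commit: the Patch import specifier is assumed, and AppFileSystem.defaultLayer is borrowed from the test setup further down.

import { Effect } from "effect"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Patch } from "./patch" // assumed import specifier

// applyPatch now yields an Effect that requires AppFileSystem.Service,
// so callers provide a concrete filesystem layer before running it.
const runPatch = (patchText: string) =>
  Patch.applyPatch(patchText).pipe(Effect.provide(AppFileSystem.defaultLayer), Effect.runPromise)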
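
And a sketch of why read.ts decodes manually before splitting (illustrative only, not code from the commit, assuming a chunked byte source): the stateful TextDecoder carries partial UTF-8 sequences across chunks, and Stream.splitLines still emits the final line when the stream ends without a trailing newline.

import { Effect, Stream } from "effect"

const collectLines = (source: Stream.Stream<Uint8Array>) => {
  const decoder = new TextDecoder("utf-8") // stateful: buffers split multi-byte sequences across chunks
  return source.pipe(
    Stream.map((chunk) => decoder.decode(chunk, { stream: true })),
    Stream.splitLines,
    Stream.runCollect,
  )
}

// The trailing "last" has no newline but is still emitted as a line:
// Effect.runSync(collectLines(Stream.make(new TextEncoder().encode("first\nlast"))))
// => Chunk(["first", "last"])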
@@ -67,11 +67,11 @@ Most exported tools are already on the intended Effect-native shape. The remaini
 
 Current spot cleanups worth tracking:
 
-- [ ] `read.ts` — still bridges to Node stream / `readline` helpers and Promise-based binary detection
+- [x] `read.ts` — streams through `AppFileSystem.Service.stream` with `Stream.splitLines`; the legacy Node stream / `readline` helper is gone
 - [ ] `bash.ts` — already uses Effect child-process primitives; only keep tracking shell-specific platform bridges and parser/loading details as they come up
 - [ ] `webfetch.ts` — already uses `HttpClient`; remaining work is limited to smaller boundary helpers like HTML text extraction
 - [ ] `file/ripgrep.ts` — adjacent to tool migration; still has raw fs/process usage that affects `grep.ts` and file-search routes
-- [ ] `patch/index.ts` — adjacent to tool migration; still has raw fs usage behind patch application
+- [x] `patch/index.ts` — apply path now returns `Effect` over `AppFileSystem.Service`; the parser and chunk replacer stay pure
 
 Notable items that are already effectively on the target path and do not need separate migration bullets right now:
 
@@ -85,6 +85,4 @@ Notable items that are already effectively on the target path and do not need se
 
 Current raw fs users that still appear relevant here:
 
-- `tool/read.ts` — `fs.createReadStream`, `readline`
 - `file/ripgrep.ts` — `fs/promises`
-- `patch/index.ts` — `fs`, `fs/promises`

@@ -1,7 +1,6 @@
-import { Schema } from "effect"
+import { Effect, Schema } from "effect"
 import * as path from "path"
-import * as fs from "fs/promises"
-import { readFileSync } from "fs"
+import { AppFileSystem } from "@opencode-ai/core/filesystem"
 import * as Log from "@opencode-ai/core/util/log"
 import * as Bom from "../util/bom"

@@ -308,14 +307,12 @@ interface ApplyPatchFileUpdate {
   bom: boolean
 }
 
-export function deriveNewContentsFromChunks(filePath: string, chunks: UpdateFileChunk[]): ApplyPatchFileUpdate {
-  // Read original file content
-  let originalContent: ReturnType<typeof Bom.split>
-  try {
-    originalContent = Bom.split(readFileSync(filePath, "utf-8"))
-  } catch (error) {
-    throw new Error(`Failed to read file ${filePath}: ${error}`, { cause: error })
-  }
+export function deriveNewContentsFromChunks(
+  filePath: string,
+  chunks: UpdateFileChunk[],
+  originalText: string,
+): ApplyPatchFileUpdate {
+  const originalContent = Bom.split(originalText)
 
   let originalLines = originalContent.text.split("\n")

@@ -423,11 +420,11 @@ function applyReplacements(lines: string[], replacements: Array<[number, number,
 // Normalize Unicode punctuation to ASCII equivalents (like Rust's normalize_unicode)
 function normalizeUnicode(str: string): string {
   return str
-    .replace(/[\u2018\u2019\u201A\u201B]/g, "'") // single quotes
-    .replace(/[\u201C\u201D\u201E\u201F]/g, '"') // double quotes
-    .replace(/[\u2010\u2011\u2012\u2013\u2014\u2015]/g, "-") // dashes
-    .replace(/\u2026/g, "...") // ellipsis
-    .replace(/\u00A0/g, " ") // non-breaking space
+    .replace(/[‘’‚‛]/g, "'") // single quotes
+    .replace(/[“”„‟]/g, '"') // double quotes
+    .replace(/[‐‑‒–—―]/g, "-") // dashes
+    .replace(/…/g, "...") // ellipsis
+    .replace(/ /g, " ") // non-breaking space
 }
 
 type Comparator = (a: string, b: string) => boolean

@@ -517,77 +514,71 @@ function generateUnifiedDiff(oldContent: string, newContent: string): string {
 }
 
 // Apply hunks to filesystem
-export async function applyHunksToFiles(hunks: Hunk[]): Promise<AffectedPaths> {
+export const applyHunksToFiles = Effect.fn("Patch.applyHunksToFiles")(function* (hunks: Hunk[]) {
   if (hunks.length === 0) {
-    throw new Error("No files were modified.")
+    return yield* Effect.fail(new Error("No files were modified."))
   }
 
+  const fs = yield* AppFileSystem.Service
+
   const added: string[] = []
   const modified: string[] = []
   const deleted: string[] = []
 
   for (const hunk of hunks) {
     switch (hunk.type) {
-      case "add":
-        // Create parent directories
-        const addDir = path.dirname(hunk.path)
-        if (addDir !== "." && addDir !== "/") {
-          await fs.mkdir(addDir, { recursive: true })
-        }
-
-        await fs.writeFile(hunk.path, hunk.contents, "utf-8")
+      case "add": {
+        yield* fs.writeWithDirs(hunk.path, hunk.contents)
         added.push(hunk.path)
         log.info(`Added file: ${hunk.path}`)
         break
+      }
 
-      case "delete":
-        await fs.unlink(hunk.path)
+      case "delete": {
+        yield* fs.remove(hunk.path)
         deleted.push(hunk.path)
         log.info(`Deleted file: ${hunk.path}`)
         break
+      }
 
-      case "update":
-        const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks)
+      case "update": {
+        const originalText = yield* fs.readFileString(hunk.path)
+        const fileUpdate = deriveNewContentsFromChunks(hunk.path, hunk.chunks, originalText)
 
         if (hunk.move_path) {
-          // Handle file move
-          const moveDir = path.dirname(hunk.move_path)
-          if (moveDir !== "." && moveDir !== "/") {
-            await fs.mkdir(moveDir, { recursive: true })
-          }
-
-          await fs.writeFile(hunk.move_path, Bom.join(fileUpdate.content, fileUpdate.bom), "utf-8")
-          await fs.unlink(hunk.path)
+          yield* fs.writeWithDirs(hunk.move_path, Bom.join(fileUpdate.content, fileUpdate.bom))
+          yield* fs.remove(hunk.path)
          modified.push(hunk.move_path)
          log.info(`Moved file: ${hunk.path} -> ${hunk.move_path}`)
        } else {
          // Regular update
-          await fs.writeFile(hunk.path, Bom.join(fileUpdate.content, fileUpdate.bom), "utf-8")
+          yield* fs.writeWithDirs(hunk.path, Bom.join(fileUpdate.content, fileUpdate.bom))
          modified.push(hunk.path)
          log.info(`Updated file: ${hunk.path}`)
        }
        break
+      }
    }
  }
 
-  return { added, modified, deleted }
-}
+  return { added, modified, deleted } satisfies AffectedPaths
+})
 
 // Main patch application function
-export async function applyPatch(patchText: string): Promise<AffectedPaths> {
+export const applyPatch = Effect.fn("Patch.applyPatch")(function* (patchText: string) {
   const { hunks } = parsePatch(patchText)
-  return applyHunksToFiles(hunks)
-}
+  return yield* applyHunksToFiles(hunks)
+})
 
-// Async version of maybeParseApplyPatchVerified
-export async function maybeParseApplyPatchVerified(
-  argv: string[],
-  cwd: string,
-): Promise<
+type MaybeApplyPatchVerifiedResult =
   | { type: MaybeApplyPatchVerified.Body; action: ApplyPatchAction }
   | { type: MaybeApplyPatchVerified.CorrectnessError; error: Error }
   | { type: MaybeApplyPatchVerified.NotApplyPatch }
-> {
+
+// Effectful verified-parse: needs AppFileSystem.Service to read existing files
+export const maybeParseApplyPatchVerified = Effect.fn("Patch.maybeParseApplyPatchVerified")(function* (
+  argv: string[],
+  cwd: string,
+) {
   // Detect implicit patch invocation (raw patch without apply_patch command)
   if (argv.length === 1) {
     try {

@@ -595,7 +586,7 @@ export async function maybeParseApplyPatchVerified(
       return {
         type: MaybeApplyPatchVerified.CorrectnessError,
         error: new Error(ApplyPatchError.ImplicitInvocation),
-      }
+      } satisfies MaybeApplyPatchVerifiedResult
     } catch {
       // Not a patch, continue
     }

@@ -604,8 +595,9 @@ export async function maybeParseApplyPatchVerified(
   const result = maybeParseApplyPatch(argv)
 
   switch (result.type) {
-    case MaybeApplyPatch.Body:
-      const { args } = result
+    case MaybeApplyPatch.Body: {
+      const fs = yield* AppFileSystem.Service
+      const args = result.args
       const effectiveCwd = args.workdir ? path.resolve(cwd, args.workdir) : cwd
       const changes = new Map<string, ApplyPatchFileChange>()
 
@@ -623,27 +615,37 @@ export async function maybeParseApplyPatchVerified(
           })
           break
 
-        case "delete":
-          // For delete, we need to read the current content
+        case "delete": {
           const deletePath = path.resolve(effectiveCwd, hunk.path)
-          try {
-            const content = await fs.readFile(deletePath, "utf-8")
-            changes.set(resolvedPath, {
-              type: "delete",
-              content,
-            })
-          } catch {
+          const content = yield* fs.readFileString(deletePath).pipe(Effect.catch(() => Effect.succeed(undefined)))
+          if (content === undefined) {
             return {
               type: MaybeApplyPatchVerified.CorrectnessError,
               error: new Error(`Failed to read file for deletion: ${deletePath}`),
-            }
+            } satisfies MaybeApplyPatchVerifiedResult
           }
+          changes.set(resolvedPath, {
+            type: "delete",
+            content,
+          })
           break
+        }
 
-        case "update":
+        case "update": {
           const updatePath = path.resolve(effectiveCwd, hunk.path)
+          const originalText = yield* fs.readFileString(updatePath).pipe(
+            Effect.catch((cause) =>
+              Effect.succeed(new Error(`Failed to read file ${updatePath}: ${cause}`, { cause })),
+            ),
+          )
+          if (originalText instanceof Error) {
+            return {
+              type: MaybeApplyPatchVerified.CorrectnessError,
+              error: originalText,
+            } satisfies MaybeApplyPatchVerifiedResult
+          }
           try {
-            const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks)
+            const fileUpdate = deriveNewContentsFromChunks(updatePath, hunk.chunks, originalText)
             changes.set(resolvedPath, {
               type: "update",
               unified_diff: fileUpdate.unified_diff,

@@ -654,9 +656,10 @@ export async function maybeParseApplyPatchVerified(
             return {
               type: MaybeApplyPatchVerified.CorrectnessError,
               error: error as Error,
-            }
+            } satisfies MaybeApplyPatchVerifiedResult
           }
           break
+        }
       }
     }
 
@@ -667,17 +670,18 @@ export async function maybeParseApplyPatchVerified(
           patch: args.patch,
           cwd: effectiveCwd,
         },
-      }
+      } satisfies MaybeApplyPatchVerifiedResult
+    }
 
     case MaybeApplyPatch.PatchParseError:
       return {
         type: MaybeApplyPatchVerified.CorrectnessError,
         error: result.error,
-      }
+      } satisfies MaybeApplyPatchVerifiedResult
 
     case MaybeApplyPatch.NotApplyPatch:
-      return { type: MaybeApplyPatchVerified.NotApplyPatch }
+      return { type: MaybeApplyPatchVerified.NotApplyPatch } satisfies MaybeApplyPatchVerifiedResult
   }
-}
+})
 
 export * as Patch from "."

@@ -119,7 +119,7 @@ export const ApplyPatchTool = Tool.define(
 
         // Apply the update chunks to get new content
         try {
-          const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks)
+          const fileUpdate = Patch.deriveNewContentsFromChunks(filePath, hunk.chunks, Bom.join(source.text, source.bom))
           newContent = fileUpdate.content
           bom = fileUpdate.bom
         } catch (error) {

@@ -1,8 +1,6 @@
-import { Effect, Option, Schema, Scope } from "effect"
+import { Effect, Option, Schema, Scope, Stream } from "effect"
 import { NonNegativeInt } from "@opencode-ai/core/schema"
-import { createReadStream } from "fs"
 import * as path from "path"
-import { createInterface } from "readline"
 import * as Tool from "./tool"
 import { AppFileSystem } from "@opencode-ai/core/filesystem"
 import { LSP } from "@/lsp/lsp"

@@ -105,6 +103,49 @@ export const ReadTool = Tool.define(
     )
   })
 
+const lines = Effect.fn("ReadTool.lines")(function* (filepath: string, opts: { limit: number; offset: number }) {
+  const start = opts.offset - 1
+  const raw: string[] = []
+  const flags = { bytes: 0, count: 0, cut: false, more: false, done: false }
+
+  // Note: prefer manual TextDecoder over Stream.decodeText — when the source stream
+  // ends without flushing, decodeText drops the final unterminated line. We also
+  // avoid Stream.runForEachWhile (it currently swallows the final unterminated
+  // line of the upstream splitLines pipeline) and instead toggle a `done` flag
+  // and ignore subsequent lines.
+  const decoder = new TextDecoder("utf-8")
+  yield* fs.stream(filepath).pipe(
+    Stream.map((bytes) => decoder.decode(bytes, { stream: true })),
+    Stream.splitLines,
+    Stream.runForEach((text) =>
+      Effect.sync(() => {
+        if (flags.done) return
+        flags.count += 1
+        if (flags.count <= start) return
+
+        if (raw.length >= opts.limit) {
+          flags.more = true
+          return
+        }
+
+        const line = text.length > MAX_LINE_LENGTH ? text.substring(0, MAX_LINE_LENGTH) + MAX_LINE_SUFFIX : text
+        const size = Buffer.byteLength(line, "utf-8") + (raw.length > 0 ? 1 : 0)
+        if (flags.bytes + size > MAX_BYTES) {
+          flags.cut = true
+          flags.more = true
+          flags.done = true
+          return
+        }
+
+        raw.push(line)
+        flags.bytes += size
+      }),
+    ),
+  )
+
+  return { raw, count: flags.count, cut: flags.cut, more: flags.more, offset: opts.offset }
+})
+
 const isBinaryFile = (filepath: string, bytes: Uint8Array) => {
   const ext = path.extname(filepath).toLowerCase()
   switch (ext) {

@@ -247,9 +288,7 @@ export const ReadTool = Tool.define(
         return yield* Effect.fail(new Error(`Cannot read binary file: ${filepath}`))
       }
 
-      const file = yield* Effect.promise(() =>
-        lines(filepath, { limit: params.limit ?? DEFAULT_READ_LIMIT, offset: params.offset || 1 }),
-      )
+      const file = yield* lines(filepath, { limit: params.limit ?? DEFAULT_READ_LIMIT, offset: params.offset || 1 })
       if (file.count < file.offset && !(file.count === 0 && file.offset === 1)) {
         return yield* Effect.fail(
           new Error(`Offset ${file.offset} is out of range for this file (${file.count} lines)`),

@@ -296,47 +335,3 @@ export const ReadTool = Tool.define(
     }
   }),
 )
-
-async function lines(filepath: string, opts: { limit: number; offset: number }) {
-  const stream = createReadStream(filepath, { encoding: "utf8" })
-  const rl = createInterface({
-    input: stream,
-    // Note: we use the crlfDelay option to recognize all instances of CR LF
-    // ('\r\n') in file as a single line break.
-    crlfDelay: Infinity,
-  })
-
-  const start = opts.offset - 1
-  const raw: string[] = []
-  let bytes = 0
-  let count = 0
-  let cut = false
-  let more = false
-  try {
-    for await (const text of rl) {
-      count += 1
-      if (count <= start) continue
-
-      if (raw.length >= opts.limit) {
-        more = true
-        continue
-      }
-
-      const line = text.length > MAX_LINE_LENGTH ? text.substring(0, MAX_LINE_LENGTH) + MAX_LINE_SUFFIX : text
-      const size = Buffer.byteLength(line, "utf-8") + (raw.length > 0 ? 1 : 0)
-      if (bytes + size > MAX_BYTES) {
-        cut = true
-        more = true
-        break
-      }
-
-      raw.push(line)
-      bytes += size
-    }
-  } finally {
-    rl.close()
-    stream.destroy()
-  }
-
-  return { raw, count, cut, more, offset: opts.offset }
-}

@@ -1,8 +1,13 @@
 import { describe, test, expect, beforeEach, afterEach } from "bun:test"
-import { Patch } from "../../src/patch"
+import { Effect } from "effect"
 import * as fs from "fs/promises"
 import * as path from "path"
 import { tmpdir } from "os"
+import { Patch } from "../../src/patch"
+import { AppFileSystem } from "@opencode-ai/core/filesystem"
+import { testEffect } from "../lib/effect"
+
+const it = testEffect(AppFileSystem.defaultLayer)
 
 describe("Patch namespace", () => {
   let tempDir: string

@@ -134,46 +139,53 @@ PATCH`
   })
 
   describe("applyPatch", () => {
-    test("should add a new file", async () => {
-      const patchText = `*** Begin Patch
+    it.live("should add a new file", () =>
+      Effect.gen(function* () {
+        const patchText = `*** Begin Patch
 *** Add File: ${tempDir}/new-file.txt
 +Hello World
 +This is a new file
 *** End Patch`
 
-      const result = await Patch.applyPatch(patchText)
-      expect(result.added).toHaveLength(1)
-      expect(result.modified).toHaveLength(0)
-      expect(result.deleted).toHaveLength(0)
+        const result = yield* Patch.applyPatch(patchText)
+        expect(result.added).toHaveLength(1)
+        expect(result.modified).toHaveLength(0)
+        expect(result.deleted).toHaveLength(0)
 
-      const content = await fs.readFile(result.added[0], "utf-8")
-      expect(content).toBe("Hello World\nThis is a new file")
-    })
+        const content = yield* Effect.promise(() => fs.readFile(result.added[0], "utf-8"))
+        expect(content).toBe("Hello World\nThis is a new file")
+      }),
+    )
 
-    test("should delete an existing file", async () => {
-      const filePath = path.join(tempDir, "to-delete.txt")
-      await fs.writeFile(filePath, "This file will be deleted")
+    it.live("should delete an existing file", () =>
+      Effect.gen(function* () {
+        const filePath = path.join(tempDir, "to-delete.txt")
+        yield* Effect.promise(() => fs.writeFile(filePath, "This file will be deleted"))
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Delete File: ${filePath}
 *** End Patch`
 
-      const result = await Patch.applyPatch(patchText)
-      expect(result.deleted).toHaveLength(1)
-      expect(result.deleted[0]).toBe(filePath)
+        const result = yield* Patch.applyPatch(patchText)
+        expect(result.deleted).toHaveLength(1)
+        expect(result.deleted[0]).toBe(filePath)
 
-      const exists = await fs
-        .access(filePath)
-        .then(() => true)
-        .catch(() => false)
-      expect(exists).toBe(false)
-    })
+        const exists = yield* Effect.promise(() =>
+          fs
+            .access(filePath)
+            .then(() => true)
+            .catch(() => false),
+        )
+        expect(exists).toBe(false)
+      }),
+    )
 
-    test("should update an existing file", async () => {
-      const filePath = path.join(tempDir, "to-update.txt")
-      await fs.writeFile(filePath, "line 1\nline 2\nline 3\n")
+    it.live("should update an existing file", () =>
+      Effect.gen(function* () {
+        const filePath = path.join(tempDir, "to-update.txt")
+        yield* Effect.promise(() => fs.writeFile(filePath, "line 1\nline 2\nline 3\n"))
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Update File: ${filePath}
 @@
 line 1

@@ -182,20 +194,22 @@ PATCH`
 line 3
 *** End Patch`
 
-      const result = await Patch.applyPatch(patchText)
-      expect(result.modified).toHaveLength(1)
-      expect(result.modified[0]).toBe(filePath)
+        const result = yield* Patch.applyPatch(patchText)
+        expect(result.modified).toHaveLength(1)
+        expect(result.modified[0]).toBe(filePath)
 
-      const content = await fs.readFile(filePath, "utf-8")
-      expect(content).toBe("line 1\nline 2 updated\nline 3\n")
-    })
+        const content = yield* Effect.promise(() => fs.readFile(filePath, "utf-8"))
+        expect(content).toBe("line 1\nline 2 updated\nline 3\n")
+      }),
+    )
 
-    test("should move and update a file", async () => {
-      const oldPath = path.join(tempDir, "old-name.txt")
-      const newPath = path.join(tempDir, "new-name.txt")
-      await fs.writeFile(oldPath, "old content\n")
+    it.live("should move and update a file", () =>
+      Effect.gen(function* () {
+        const oldPath = path.join(tempDir, "old-name.txt")
+        const newPath = path.join(tempDir, "new-name.txt")
+        yield* Effect.promise(() => fs.writeFile(oldPath, "old content\n"))
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Update File: ${oldPath}
 *** Move to: ${newPath}
 @@

@@ -203,29 +217,33 @@ PATCH`
 +new content
 *** End Patch`
 
-      const result = await Patch.applyPatch(patchText)
-      expect(result.modified).toHaveLength(1)
-      expect(result.modified[0]).toBe(newPath)
+        const result = yield* Patch.applyPatch(patchText)
+        expect(result.modified).toHaveLength(1)
+        expect(result.modified[0]).toBe(newPath)
 
-      const oldExists = await fs
-        .access(oldPath)
-        .then(() => true)
-        .catch(() => false)
-      expect(oldExists).toBe(false)
+        const oldExists = yield* Effect.promise(() =>
+          fs
+            .access(oldPath)
+            .then(() => true)
+            .catch(() => false),
+        )
+        expect(oldExists).toBe(false)
 
-      const newContent = await fs.readFile(newPath, "utf-8")
-      expect(newContent).toBe("new content\n")
-    })
+        const newContent = yield* Effect.promise(() => fs.readFile(newPath, "utf-8"))
+        expect(newContent).toBe("new content\n")
+      }),
+    )
 
-    test("should handle multiple operations in one patch", async () => {
-      const file1 = path.join(tempDir, "file1.txt")
-      const file2 = path.join(tempDir, "file2.txt")
-      const file3 = path.join(tempDir, "file3.txt")
+    it.live("should handle multiple operations in one patch", () =>
+      Effect.gen(function* () {
+        const file1 = path.join(tempDir, "file1.txt")
+        const file2 = path.join(tempDir, "file2.txt")
+        const file3 = path.join(tempDir, "file3.txt")
 
-      await fs.writeFile(file1, "content 1")
-      await fs.writeFile(file2, "content 2")
+        yield* Effect.promise(() => fs.writeFile(file1, "content 1"))
+        yield* Effect.promise(() => fs.writeFile(file2, "content 2"))
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Add File: ${file3}
 +new file content
 *** Update File: ${file1}

@@ -235,98 +253,114 @@ PATCH`
 *** Delete File: ${file2}
 *** End Patch`
 
-      const result = await Patch.applyPatch(patchText)
-      expect(result.added).toHaveLength(1)
-      expect(result.modified).toHaveLength(1)
-      expect(result.deleted).toHaveLength(1)
-    })
+        const result = yield* Patch.applyPatch(patchText)
+        expect(result.added).toHaveLength(1)
+        expect(result.modified).toHaveLength(1)
+        expect(result.deleted).toHaveLength(1)
+      }),
+    )
 
-    test("should create parent directories when adding files", async () => {
-      const nestedPath = path.join(tempDir, "deep", "nested", "file.txt")
+    it.live("should create parent directories when adding files", () =>
+      Effect.gen(function* () {
+        const nestedPath = path.join(tempDir, "deep", "nested", "file.txt")
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Add File: ${nestedPath}
 +Deep nested content
 *** End Patch`
 
-      const result = await Patch.applyPatch(patchText)
-      expect(result.added).toHaveLength(1)
-      expect(result.added[0]).toBe(nestedPath)
+        const result = yield* Patch.applyPatch(patchText)
+        expect(result.added).toHaveLength(1)
+        expect(result.added[0]).toBe(nestedPath)
 
-      const exists = await fs
-        .access(nestedPath)
-        .then(() => true)
-        .catch(() => false)
-      expect(exists).toBe(true)
-    })
+        const exists = yield* Effect.promise(() =>
+          fs
+            .access(nestedPath)
+            .then(() => true)
+            .catch(() => false),
+        )
+        expect(exists).toBe(true)
+      }),
+    )
   })
 
   describe("error handling", () => {
-    test("should throw error when updating non-existent file", async () => {
-      const nonExistent = path.join(tempDir, "does-not-exist.txt")
+    it.live("should fail when updating non-existent file", () =>
+      Effect.gen(function* () {
+        const nonExistent = path.join(tempDir, "does-not-exist.txt")
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Update File: ${nonExistent}
 @@
 -old line
 +new line
 *** End Patch`
 
-      await expect(Patch.applyPatch(patchText)).rejects.toThrow()
-    })
+        const exit = yield* Effect.exit(Patch.applyPatch(patchText))
+        expect(exit._tag).toBe("Failure")
+      }),
+    )
 
-    test("should throw error when deleting non-existent file", async () => {
-      const nonExistent = path.join(tempDir, "does-not-exist.txt")
+    it.live("should fail when deleting non-existent file", () =>
+      Effect.gen(function* () {
+        const nonExistent = path.join(tempDir, "does-not-exist.txt")
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Delete File: ${nonExistent}
 *** End Patch`
 
-      await expect(Patch.applyPatch(patchText)).rejects.toThrow()
-    })
+        const exit = yield* Effect.exit(Patch.applyPatch(patchText))
+        expect(exit._tag).toBe("Failure")
+      }),
+    )
   })
 
   describe("edge cases", () => {
-    test("should handle empty files", async () => {
-      const emptyFile = path.join(tempDir, "empty.txt")
-      await fs.writeFile(emptyFile, "")
+    it.live("should handle empty files", () =>
+      Effect.gen(function* () {
+        const emptyFile = path.join(tempDir, "empty.txt")
+        yield* Effect.promise(() => fs.writeFile(emptyFile, ""))
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Update File: ${emptyFile}
 @@
 +First line
 *** End Patch`
 
-      const result = await Patch.applyPatch(patchText)
-      expect(result.modified).toHaveLength(1)
+        const result = yield* Patch.applyPatch(patchText)
+        expect(result.modified).toHaveLength(1)
 
-      const content = await fs.readFile(emptyFile, "utf-8")
-      expect(content).toBe("First line\n")
-    })
+        const content = yield* Effect.promise(() => fs.readFile(emptyFile, "utf-8"))
+        expect(content).toBe("First line\n")
+      }),
+    )
 
-    test("should handle files with no trailing newline", async () => {
-      const filePath = path.join(tempDir, "no-newline.txt")
-      await fs.writeFile(filePath, "no newline")
+    it.live("should handle files with no trailing newline", () =>
+      Effect.gen(function* () {
+        const filePath = path.join(tempDir, "no-newline.txt")
+        yield* Effect.promise(() => fs.writeFile(filePath, "no newline"))
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Update File: ${filePath}
 @@
 -no newline
 +has newline now
 *** End Patch`
 
-      const result = await Patch.applyPatch(patchText)
-      expect(result.modified).toHaveLength(1)
+        const result = yield* Patch.applyPatch(patchText)
+        expect(result.modified).toHaveLength(1)
 
-      const content = await fs.readFile(filePath, "utf-8")
-      expect(content).toBe("has newline now\n")
-    })
+        const content = yield* Effect.promise(() => fs.readFile(filePath, "utf-8"))
+        expect(content).toBe("has newline now\n")
+      }),
+    )
 
-    test("should handle multiple update chunks in single file", async () => {
-      const filePath = path.join(tempDir, "multi-chunk.txt")
-      await fs.writeFile(filePath, "line 1\nline 2\nline 3\nline 4\n")
+    it.live("should handle multiple update chunks in single file", () =>
+      Effect.gen(function* () {
+        const filePath = path.join(tempDir, "multi-chunk.txt")
+        yield* Effect.promise(() => fs.writeFile(filePath, "line 1\nline 2\nline 3\nline 4\n"))
 
-      const patchText = `*** Begin Patch
+        const patchText = `*** Begin Patch
 *** Update File: ${filePath}
 @@
 line 1

@@ -338,11 +372,12 @@ PATCH`
 +LINE 4
 *** End Patch`
 
-      const result = await Patch.applyPatch(patchText)
-      expect(result.modified).toHaveLength(1)
+        const result = yield* Patch.applyPatch(patchText)
+        expect(result.modified).toHaveLength(1)
 
-      const content = await fs.readFile(filePath, "utf-8")
-      expect(content).toBe("line 1\nLINE 2\nline 3\nLINE 4\n")
-    })
+        const content = yield* Effect.promise(() => fs.readFile(filePath, "utf-8"))
+        expect(content).toBe("line 1\nLINE 2\nline 3\nLINE 4\n")
+      }),
+    )
   })
 })