
Commit 9320655

examples: add top-level await in examples (#206)
1 parent e2eac5b commit 9320655


3 files changed (+46, -34 lines)


examples/fill-in-middle/fill.ts

Lines changed: 10 additions & 6 deletions
@@ -1,8 +1,12 @@
 import ollama from 'ollama'
 
-const response = await ollama.generate({
-  model: 'deepseek-coder-v2',
-  prompt: `def add(`,
-  suffix: `return c`,
-})
-console.log(response.response)
+async function main() {
+  const response = await ollama.generate({
+    model: 'deepseek-coder-v2',
+    prompt: `def add(`,
+    suffix: `return c`,
+  })
+  console.log(response.response)
+}
+
+main().catch(console.error)

examples/multimodal/multimodal.ts

Lines changed: 13 additions & 9 deletions
@@ -1,12 +1,16 @@
 import ollama from 'ollama'
 
-const imagePath = './examples/multimodal/cat.jpg'
-const response = await ollama.generate({
-  model: 'llava',
-  prompt: 'describe this image:',
-  images: [imagePath],
-  stream: true,
-})
-for await (const part of response) {
-  process.stdout.write(part.response)
+async function main() {
+  const imagePath = './examples/multimodal/cat.jpg'
+  const response = await ollama.generate({
+    model: 'llava',
+    prompt: 'describe this image:',
+    images: [imagePath],
+    stream: true,
+  })
+  for await (const part of response) {
+    process.stdout.write(part.response)
+  }
 }
+
+main().catch(console.error)

examples/pull-progress/pull.ts

Lines changed: 23 additions & 19 deletions
@@ -1,25 +1,29 @@
 import ollama from 'ollama'
 
-const model = 'llama3.1'
-console.log(`downloading ${model}...`)
-let currentDigestDone = false
-const stream = await ollama.pull({ model: model, stream: true })
-for await (const part of stream) {
-  if (part.digest) {
-    let percent = 0
-    if (part.completed && part.total) {
-      percent = Math.round((part.completed / part.total) * 100)
-    }
-    process.stdout.clearLine(0) // Clear the current line
-    process.stdout.cursorTo(0) // Move cursor to the beginning of the line
-    process.stdout.write(`${part.status} ${percent}%...`) // Write the new text
-    if (percent === 100 && !currentDigestDone) {
-      console.log() // Output to a new line
-      currentDigestDone = true
+async function main() {
+  const model = 'llama3.1'
+  console.log(`downloading ${model}...`)
+  let currentDigestDone = false
+  const stream = await ollama.pull({ model: model, stream: true })
+  for await (const part of stream) {
+    if (part.digest) {
+      let percent = 0
+      if (part.completed && part.total) {
+        percent = Math.round((part.completed / part.total) * 100)
+      }
+      process.stdout.clearLine(0) // Clear the current line
+      process.stdout.cursorTo(0) // Move cursor to the beginning of the line
+      process.stdout.write(`${part.status} ${percent}%...`) // Write the new text
+      if (percent === 100 && !currentDigestDone) {
+        console.log() // Output to a new line
+        currentDigestDone = true
+      } else {
+        currentDigestDone = false
+      }
     } else {
-      currentDigestDone = false
+      console.log(part.status)
     }
-  } else {
-    console.log(part.status)
   }
 }
+
+main().catch(console.error)
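
All three examples now follow the same wrapper: the awaited ollama calls move into an async main(), and main().catch(console.error) reports any rejection instead of leaving it unhandled. Below is a minimal standalone sketch of that pattern, assuming a placeholder model name and prompt that are not taken from these files:

import ollama from 'ollama'

// Every await lives inside an async function rather than at module top
// level, so the example also runs in environments without top-level
// await support (for example, CommonJS builds).
async function main() {
  const response = await ollama.generate({
    model: 'llama3.1', // placeholder model name, not part of this commit
    prompt: 'Write a haiku about streams.', // placeholder prompt
  })
  console.log(response.response)
}

// Report any error from the async work instead of leaving the promise
// rejection unhandled.
main().catch(console.error)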
