sync: move pool related code to sync.pool, cleanup, add a README.md

This commit is contained in:
Delyan Angelov 2021-02-11 10:55:23 +02:00
parent 93c1c1cec3
commit 578de634fe
No known key found for this signature in database
GPG key ID: 66886C0F12D595ED
8 changed files with 292 additions and 347 deletions

View file

@@ -3,25 +3,25 @@
// that can be found in the LICENSE file.
import net.http
import json
import sync
import sync.pool
struct Story {
title string
url string
}
fn worker_fetch(p &sync.PoolProcessor, cursor int, worker_id int) voidptr {
id := p.get_int_item(cursor)
fn worker_fetch(p &pool.PoolProcessor, cursor int, worker_id int) voidptr {
id := p.get_item<int>(cursor)
resp := http.get('https://hacker-news.firebaseio.com/v0/item/${id}.json') or {
println('failed to fetch data from /v0/item/${id}.json')
return sync.no_result
return pool.no_result
}
story := json.decode(Story,resp.text) or {
story := json.decode(Story, resp.text) or {
println('failed to decode a story')
return sync.no_result
return pool.no_result
}
println('# $cursor) $story.title | $story.url')
return sync.no_result
return pool.no_result
}
// Fetches top HN stories in parallel, depending on how many cores you have
@@ -30,20 +30,20 @@ fn main() {
println('failed to fetch data from /v0/topstories.json')
return
}
mut ids := json.decode([]int,resp.text) or {
mut ids := json.decode([]int, resp.text) or {
println('failed to decode topstories.json')
return
}
if ids.len > 10 {
ids = ids[0..10]
}
mut fetcher_pool := sync.new_pool_processor({
mut fetcher_pool := pool.new_pool_processor(
callback: worker_fetch
})
)
// NB: if you do not call set_max_jobs, the pool will try to use an optimal
// number of threads, one per each core in your system, which in most
// cases is what you want anyway... You can override the automatic choice
// by setting the VJOBS environment variable too.
// fetcher_pool.set_max_jobs( 4 )
fetcher_pool.work_on_items_i(ids)
fetcher_pool.work_on_items(ids)
}