Sebastian Markbåge (2016-03-15T23:43:25.000Z)
Has anyone previously proposed Algebraic Effects (e.g. Eff-like handlers
of continuations) for ECMAScript? #lazyweb I could only find variations
that aren't quite this.

I'm specifically looking at the OCaml implementation for inspiration:

http://kcsrk.info/ocaml/multicore/2015/05/20/effects-multicore/

http://kcsrk.info/slides/multicore_fb16.pdf

I'm not proposing the multicore aspect of this but just the delimited
continuations.

Basically, the idea is that you can capture effects by wrapping a call in a
"try". That spawns a fiber. Then, as a new language feature, any function
can "perform" an effect. That works effectively like "throw", except that
the handler also receives a reified continuation in a tuple. The
continuation can be invoked to continue where you left off.

Imaginary syntax:

function otherFunction() {
  console.log(1);
  let a = perform { x: 1, y: 2 };
  console.log(a);
  return a;
}

do try {
  let b = otherFunction();
  b + 1;
} catch effect -> [{ x, y }, continuation] {
  console.log(2);
  let c = continuation(x + y);
  console.log(c);
  c + 1;
}

Prints:
1
2
3
4

Evaluates to: 5

(`perform` is a contextual keyword analogous to "throw", and `catch effect`
is the syntax for handling the effect. The destructuring in the handler is
borrowed from the pattern matching proposal:
https://gist.github.com/bterlson/da8f02b95b484cd4f8d9)

We've experimented with changing React's implementation to use these
internally to handle concurrency and to implement complex algorithms that
require a lot of back and forth, such as layout calculation. It seems to
make these implementations much easier while remaining efficient.

It also allows for seamless async I/O handling by yielding deep in a fiber.
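
For example (a rough sketch in the same imaginary syntax; the ReadFile
effect class is made up for illustration), an I/O handler could resume the
fiber from an ordinary Node callback, while the code performing the effect
stays synchronous in shape:

import { readFile } from "fs";

class ReadFile {
  constructor(path) {
    this.path = path;
  }
}

// Arbitrarily deep inside otherwise ordinary, synchronous-looking code:
function loadConfig() {
  let text = perform new ReadFile("config.json");
  return JSON.parse(text);
}

do try {
  console.log(loadConfig());
} catch effect ReadFile -> [{ path }, continuation] {
  // Resume the suspended fiber once the data arrives (errors omitted).
  readFile(path, "utf8", (err, data) => continuation(data));
}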

Effectively this is just solving the same problem as generators and
async/await.

However, the benefit is that you don't have a split between "async"
functions, generator functions and synchronous functions. You still have an
explicit entry point through the place where you catch effects.

With generators and async functions, any time you want to change a deep
effect you have to unwind all potential callers. Every intermediate library
has to be turned into async functions. The refactoring is painful and
leaves you with a lot of syntax overhead.
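
For contrast, here is the kind of refactoring async/await forces today (a
minimal, runnable sketch with made-up function names): once the leaf starts
awaiting, every function between it and the entry point must itself become
async and be awaited.

// The leaf switches from a synchronous computation to async I/O.
function fetchValue() {
  return new Promise(resolve => setTimeout(() => resolve(42), 10));
}

// ...so this intermediate helper has to be rewritten as async...
async function intermediate() {
  return await fetchValue();
}

// ...and so does its caller, all the way up to the entry point.
async function entryPoint() {
  console.log(await intermediate());
}

entryPoint();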

If you want to nest different effects, such as layout, iteration and async
I/O, that complexity explodes, because now every intermediate function has
to be able to handle all of those concepts.
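
Presumably, effect handlers would nest the way exception handlers do: an
effect not matched by the inner `catch effect` clause propagates to an
outer handler, and the intermediate functions stay plain. A rough sketch in
the same imaginary syntax (the Measure and Fetch effect classes are made
up, and item / measureLayout are assumed to exist in scope):

class Measure { constructor(node) { this.node = node; } }
class Fetch { constructor(url) { this.url = url; } }

function renderItem(item) {
  // A plain function: it neither knows nor cares which handlers are installed.
  let size = perform new Measure(item.node);
  let data = perform new Fetch(item.url);
  return { size, data };
}

do try {
  do try {
    renderItem(item);
  } catch effect Measure -> [{ node }, continuation] {
    continuation(measureLayout(node));
  }
  // Fetch is not handled by the inner handler, so it reaches the outer one.
} catch effect Fetch -> [{ url }, continuation] {
  fetch(url).then(response => response.json()).then(continuation);
}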

The performance characteristics demonstrated by KC Sivaramakrishnan are
also much more promising than what JS VMs have been able to achieve with
async/await and generators so far. It's plausible that VMs could optimize
this in a similar way, in time. I suspect that the leakiness of the
microtask queue might cause problems, though.

I converted the OCaml example scheduler to this ECMAScript-compatible
syntax:

// RoundRobinScheduler.js

// Effect classes: forking a new fiber and yielding control to the scheduler.
class Fork {
  constructor(fn) {
    this.fn = fn;
  }
}
export function fork(f) {
  perform new Fork(f);
}

class Yield { }
export function yieldHere() {
  perform new Yield();
}

export function run(main) {
  // Queue of paused continuations waiting for their turn.
  const run_q = [];
  function enqueue(k) {
    run_q.push(k);
  }
  function dequeue() {
    // Resume the next paused fiber, if there is one.
    if (run_q.length) {
      run_q.shift()();
    }
  }
  function spawn(f) {
    try {
      f();
      // The fiber ran to completion; give another fiber a turn.
      dequeue();
    } catch (e) {
      console.log(e.toString());
    } catch effect Yield -> [_, k] {
      // Park the current fiber at the back of the queue and run the next one.
      enqueue(k);
      dequeue();
    } catch effect Fork -> [fork, k] {
      // Park the parent and run the forked fiber immediately.
      enqueue(k);
      spawn(fork.fn);
    }
  }
  spawn(main);
}

// Example.js

import * as Sched from "RoundRobinScheduler";

function f(id, depth) {
  console.log("Starting number %i", id);
  if (depth > 0) {
    console.log("Forking number %i", id * 2 + 1);
    Sched.fork(() => f(id * 2 + 1, depth - 1));
    console.log("Forking number %i", id * 2 + 2);
    Sched.fork(() => f(id * 2 + 2, depth - 1));
  } else {
    console.log("Yielding in number %i", id);
    Sched.yieldHere();
    console.log("Resumed number %i", id);
  }
  console.log("Finishing number %i", id);
}

Sched.run(() => f(0, 2));
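
For comparison, here is roughly the same round-robin scheduler written with
today's generators (a runnable emulation, not the proposal itself). Note
the overhead this proposal avoids: f must be a generator, and any helper
that wanted to fork or yield on f's behalf would also have to become a
generator and be driven with yield*.

// RoundRobinScheduler, emulated with generators (runnable today).
class Fork {
  constructor(genFn) {
    this.genFn = genFn;
  }
}
class Yield { }

function run(mainGenFn) {
  const runQ = [];
  function step(gen) {
    const { done, value } = gen.next();
    if (done) {
      // Task finished; give another task a turn.
      if (runQ.length) step(runQ.shift());
    } else if (value instanceof Yield) {
      // Park this task at the back of the queue and run the next one.
      runQ.push(gen);
      step(runQ.shift());
    } else if (value instanceof Fork) {
      // Park the parent and run the forked task immediately.
      runQ.push(gen);
      step(value.genFn());
    }
  }
  step(mainGenFn());
}

function* f(id, depth) {
  console.log("Starting number %i", id);
  if (depth > 0) {
    console.log("Forking number %i", id * 2 + 1);
    yield new Fork(() => f(id * 2 + 1, depth - 1));
    console.log("Forking number %i", id * 2 + 2);
    yield new Fork(() => f(id * 2 + 2, depth - 1));
  } else {
    console.log("Yielding in number %i", id);
    yield new Yield();
    console.log("Resumed number %i", id);
  }
  console.log("Finishing number %i", id);
}

run(() => f(0, 2));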