diff --git a/book/case-finance/README.md b/book/case-finance/README.md
deleted file mode 100644
index 96e2ede..0000000
--- a/book/case-finance/README.md
+++ /dev/null
@@ -1,29 +0,0 @@
-# Case - Applications in Finance
-
-## Introduction
-
-Introduce the importance of numerical computing in Finance.
-Introduce what companies use what functional languages in their applications.
-
-
-## Bond Pricing
-
-Net present value ...
-
-
-## Black-Scholes Model
-
-### Mathematical Model
-
-### Option Pricing
-
-
-## Portfolio Optimisation
-
-Introduce Modern portfolio theory (MPT), Markowitz model.
-
-### Mathematical Model
-
-### Efficient Frontier
-
-### Maximise Sharpe Ratio
diff --git a/book/case-finance/dune b/book/case-finance/dune
deleted file mode 100644
index b70b36e..0000000
--- a/book/case-finance/dune
+++ /dev/null
@@ -1,12 +0,0 @@
-(rule
- (target dune.gen)
- (action
- (with-stdout-to %{target}
- (run ocaml-mdx rule --prelude=%{dep:prelude.ml} %{dep:README.md}))))
-
-(alias
- (name runtest)
- (deps README.md)
- (action (diff dune.inc dune.gen)))
-
-(include dune.inc)
diff --git a/book/case-finance/dune.inc b/book/case-finance/dune.inc
deleted file mode 100644
index efe32dc..0000000
--- a/book/case-finance/dune.inc
+++ /dev/null
@@ -1,9 +0,0 @@
-(alias
- (name runtest)
- (deps
- (:x README.md)
- prelude.ml)
- (action
- (progn
- (run ocaml-mdx test --prelude=prelude.ml %{x})
- (diff? %{x} %{x}.corrected))))
diff --git a/book/case-finance/prelude.ml b/book/case-finance/prelude.ml
deleted file mode 100644
index 737f02d..0000000
--- a/book/case-finance/prelude.ml
+++ /dev/null
@@ -1,9 +0,0 @@
-#require "owl-top";;
-
-open Owl
-open Bigarray
-
-let () = Printexc.record_backtrace false
-let () =
- Owl_base_stats_prng.init 89;
- Owl_stats_prng.init 89
\ No newline at end of file
diff --git a/book/case-gpu/README.md b/book/case-gpu/README.md
deleted file mode 100644
index dd3cf18..0000000
--- a/book/case-gpu/README.md
+++ /dev/null
@@ -1,19 +0,0 @@
-# Case - Using Accelerators
-
-TBD
-
-## Define DNN in Owl
-
-
-## Convert to ONNX Format
-
-
-## Model Training
-
-
-## Model Inference
-
-
-## Other Accelerator Framework
-
-
diff --git a/book/case-gpu/dune b/book/case-gpu/dune
deleted file mode 100644
index b70b36e..0000000
--- a/book/case-gpu/dune
+++ /dev/null
@@ -1,12 +0,0 @@
-(rule
- (target dune.gen)
- (action
- (with-stdout-to %{target}
- (run ocaml-mdx rule --prelude=%{dep:prelude.ml} %{dep:README.md}))))
-
-(alias
- (name runtest)
- (deps README.md)
- (action (diff dune.inc dune.gen)))
-
-(include dune.inc)
diff --git a/book/case-gpu/dune.inc b/book/case-gpu/dune.inc
deleted file mode 100644
index efe32dc..0000000
--- a/book/case-gpu/dune.inc
+++ /dev/null
@@ -1,9 +0,0 @@
-(alias
- (name runtest)
- (deps
- (:x README.md)
- prelude.ml)
- (action
- (progn
- (run ocaml-mdx test --prelude=prelude.ml %{x})
- (diff? %{x} %{x}.corrected))))
diff --git a/book/case-gpu/prelude.ml b/book/case-gpu/prelude.ml
deleted file mode 100644
index 737f02d..0000000
--- a/book/case-gpu/prelude.ml
+++ /dev/null
@@ -1,9 +0,0 @@
-#require "owl-top";;
-
-open Owl
-open Bigarray
-
-let () = Printexc.record_backtrace false
-let () =
- Owl_base_stats_prng.init 89;
- Owl_stats_prng.init 89
\ No newline at end of file
diff --git a/book/community/README.md b/book/community/README.md
deleted file mode 100644
index 1888b46..0000000
--- a/book/community/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Community-Driven R&D
-
-How to contribute to the community etc.
-
-TBD
diff --git a/book/community/dune b/book/community/dune
deleted file mode 100644
index b70b36e..0000000
--- a/book/community/dune
+++ /dev/null
@@ -1,12 +0,0 @@
-(rule
- (target dune.gen)
- (action
- (with-stdout-to %{target}
- (run ocaml-mdx rule --prelude=%{dep:prelude.ml} %{dep:README.md}))))
-
-(alias
- (name runtest)
- (deps README.md)
- (action (diff dune.inc dune.gen)))
-
-(include dune.inc)
diff --git a/book/community/dune.inc b/book/community/dune.inc
deleted file mode 100644
index efe32dc..0000000
--- a/book/community/dune.inc
+++ /dev/null
@@ -1,9 +0,0 @@
-(alias
- (name runtest)
- (deps
- (:x README.md)
- prelude.ml)
- (action
- (progn
- (run ocaml-mdx test --prelude=prelude.ml %{x})
- (diff? %{x} %{x}.corrected))))
diff --git a/book/community/prelude.ml b/book/community/prelude.ml
deleted file mode 100644
index 737f02d..0000000
--- a/book/community/prelude.ml
+++ /dev/null
@@ -1,9 +0,0 @@
-#require "owl-top";;
-
-open Owl
-open Bigarray
-
-let () = Printexc.record_backtrace false
-let () =
- Owl_base_stats_prng.init 89;
- Owl_stats_prng.init 89
\ No newline at end of file
diff --git a/book/core-opt/README.md b/book/core-opt/README.md
index 8fbe9f1..909b1f5 100644
--- a/book/core-opt/README.md
+++ b/book/core-opt/README.md
@@ -650,6 +650,4 @@ As can be seen, my repeat operation achieves about half of that in NumPy with re
 The outer repeat operation in NumPy is implemented using the single axis version, and thus is less efficient.
 The repeat operation in Julia is much slower. One reason is that `repeat` is not a computation-intensive operation, so the optimisation techniques such as static compilation and vectorisation are of less importance than algorithm design.
 
-## Summary
-
 ## References
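The comparison in the hunk above rests on the fact that `np.repeat` operates on a single axis per call, so NumPy composes multi-axis repeats out of single-axis passes, while the Owl implementation repeats along all axes in one pass. A minimal usage sketch, assuming the multi-axis `Arr.repeat : arr -> int array -> arr` signature described in the chapter; the shapes and repetition counts are illustrative only:

```ocaml
(* Assumes Arr.repeat takes a per-axis repetition array, as described in
   the core-opt chapter; the values below are purely illustrative. *)
let () =
  let x = Owl.Arr.sequential [|2; 3|] in   (* [[0;1;2]; [3;4;5]] *)
  let y = Owl.Arr.repeat x [|2; 3|] in     (* repeat both axes in one pass *)
  assert (Owl.Arr.shape y = [|4; 9|]);
  Owl.Arr.print y
```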
diff --git a/book/distributed/README.md b/book/distributed/README.md
index ce13a3d..9603ac2 100644
--- a/book/distributed/README.md
+++ b/book/distributed/README.md
@@ -507,8 +507,6 @@ let push kv_pairs =
 ) kv_pairs
 ```
 
-TODO: Explain the code
-
 ![MNIST training using Actor](images/distributed/exp_accuracy_01.png){#fig:distributed:exp_accuracy_01}
 
 In [@fig:distributed:exp_accuracy_01], we conduct the real-world experiments using 6 worker nodes with the Parameter Server framework we have implemented.
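For orientation on the `push` function whose tail appears in the hunk above: in the Parameter Server pattern, `push` is the worker-side callback that receives key/parameter pairs, applies a locally computed update to each parameter, and returns the updated pairs. A minimal sketch of that shape only; `local_gradient` and `alpha` are hypothetical placeholders, not part of the Actor API:

```ocaml
(* Sketch of a parameter-server push callback: map over the (key, parameter)
   pairs and return locally updated parameters. `local_gradient` and `alpha`
   are placeholders for a real training step and learning rate. *)
let alpha = 0.01

let local_gradient (w : Owl.Arr.arr) : Owl.Arr.arr =
  (* placeholder: a real worker would compute this from its data shard *)
  Owl.Arr.zeros (Owl.Arr.shape w)

let push kv_pairs =
  Array.map (fun (key, w) ->
    let w' = Owl.Arr.(w - local_gradient w *$ alpha) in
    (key, w')
  ) kv_pairs
```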
diff --git a/book/ppl/README.md b/book/ppl/README.md
deleted file mode 100644
index 0ae9c4b..0000000
--- a/book/ppl/README.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# Probabilistic Programming
-
-TODO: http://edwardlib.org/tutorials/
-
-
-## Generative Model vs Discriminative Model
-
-
-## Bayesian Networks
-
-
-## Sampling Techniques
-
-TODO: MCMC, Gibbs Sampling, MH ...
-
-## Inference
diff --git a/book/ppl/dune b/book/ppl/dune
deleted file mode 100644
index b70b36e..0000000
--- a/book/ppl/dune
+++ /dev/null
@@ -1,12 +0,0 @@
-(rule
- (target dune.gen)
- (action
- (with-stdout-to %{target}
- (run ocaml-mdx rule --prelude=%{dep:prelude.ml} %{dep:README.md}))))
-
-(alias
- (name runtest)
- (deps README.md)
- (action (diff dune.inc dune.gen)))
-
-(include dune.inc)
diff --git a/book/ppl/dune.inc b/book/ppl/dune.inc
deleted file mode 100644
index efe32dc..0000000
--- a/book/ppl/dune.inc
+++ /dev/null
@@ -1,9 +0,0 @@
-(alias
- (name runtest)
- (deps
- (:x README.md)
- prelude.ml)
- (action
- (progn
- (run ocaml-mdx test --prelude=prelude.ml %{x})
- (diff? %{x} %{x}.corrected))))
diff --git a/book/ppl/prelude.ml b/book/ppl/prelude.ml
deleted file mode 100644
index 737f02d..0000000
--- a/book/ppl/prelude.ml
+++ /dev/null
@@ -1,9 +0,0 @@
-#require "owl-top";;
-
-open Owl
-open Bigarray
-
-let () = Printexc.record_backtrace false
-let () =
- Owl_base_stats_prng.init 89;
- Owl_stats_prng.init 89
\ No newline at end of file
diff --git a/docs/core-opt.html b/docs/core-opt.html
index a42a502..2c9507b 100644
--- a/docs/core-opt.html
+++ b/docs/core-opt.html
@@ -333,9 +333,6 @@
Repeat Operations
The evaluation results compared with NumPy and Julia are shown in [@fig:core-opt:opeval_repeat]. We also measure the peak memory usage in[@fig:core-opt:opeval_tp_repeat_mem_00]. As can be seen, my repeat operation achieves about half of that in NumPy with regard to both execution speed and memory usage. The outer repeat operation in NumPy is implemented using the single axis version, and thus is less efficient. The repeat operation in Julia is much slower. One reason is that repeat is not a computation-intensive operation, so the optimisation techniques such as static compilation and vectorisation are of less importance than algorithm design.