@@ -0,0 +1,140 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>distribution/math.zig - source view</title>
<link rel="icon" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAgklEQVR4AWMYWuD7EllJIM4G4g4g5oIJ/odhOJ8wToOxSTXgNxDHoeiBMfA4+wGShjyYOCkG/IGqWQziEzYAoUAeiF9D5U+DxEg14DRU7jWIT5IBIOdCxf+A+CQZAAoopEB7QJwBCBwHiip8UYmRdrAlDpIMgApwQZNnNii5Dq0MBgCxxycBnwEd+wAAAABJRU5ErkJggg==">
<link rel="icon" href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxNTMgMTQwIj48ZyBmaWxsPSIjRjdBNDFEIj48Zz48cG9seWdvbiBwb2ludHM9IjQ2LDIyIDI4LDQ0IDE5LDMwIi8+PHBvbHlnb24gcG9pbnRzPSI0NiwyMiAzMywzMyAyOCw0NCAyMiw0NCAyMiw5NSAzMSw5NSAyMCwxMDAgMTIsMTE3IDAsMTE3IDAsMjIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMzEsOTUgMTIsMTE3IDQsMTA2Ii8+PC9nPjxnPjxwb2x5Z29uIHBvaW50cz0iNTYsMjIgNjIsMzYgMzcsNDQiLz48cG9seWdvbiBwb2ludHM9IjU2LDIyIDExMSwyMiAxMTEsNDQgMzcsNDQgNTYsMzIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTE2LDk1IDk3LDExNyA5MCwxMDQiLz48cG9seWdvbiBwb2ludHM9IjExNiw5NSAxMDAsMTA0IDk3LDExNyA0MiwxMTcgNDIsOTUiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTUwLDAgNTIsMTE3IDMsMTQwIDEwMSwyMiIvPjwvZz48Zz48cG9seWdvbiBwb2ludHM9IjE0MSwyMiAxNDAsNDAgMTIyLDQ1Ii8+PHBvbHlnb24gcG9pbnRzPSIxNTMsMjIgMTUzLDExNyAxMDYsMTE3IDEyMCwxMDUgMTI1LDk1IDEzMSw5NSAxMzEsNDUgMTIyLDQ1IDEzMiwzNiAxNDEsMjIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTI1LDk1IDEzMCwxMTAgMTA2LDExNyIvPjwvZz48L2c+PC9zdmc+">
<style>
body{
  font-family: system-ui, -apple-system, Roboto, "Segoe UI", sans-serif;
  margin: 0;
  line-height: 1.5;
}

pre > code {
  display: block;
  overflow: auto;
  line-height: normal;
  margin: 0em;
}
.tok-kw {
  color: #333;
  font-weight: bold;
}
.tok-str {
  color: #d14;
}
.tok-builtin {
  color: #005C7A;
}
.tok-comment {
  color: #545454;
  font-style: italic;
}
.tok-fn {
  color: #900;
  font-weight: bold;
}
.tok-null {
  color: #005C5C;
}
.tok-number {
  color: #005C5C;
}
.tok-type {
  color: #458;
  font-weight: bold;
}
pre {
  counter-reset: line;
}
pre .line:before {
  counter-increment: line;
  content: counter(line);
  display: inline-block;
  padding-right: 1em;
  width: 2em;
  text-align: right;
  color: #999;
}

.line {
  width: 100%;
  display: inline-block;
}
.line:target {
  border-top: 1px solid #ccc;
  border-bottom: 1px solid #ccc;
  background: #fafafa;
}

@media (prefers-color-scheme: dark) {
  body{
    background:#222;
    color: #ccc;
  }
  pre > code {
    color: #ccc;
    background: #222;
    border: unset;
  }
  .line:target {
    border-top: 1px solid #444;
    border-bottom: 1px solid #444;
    background: #333;
  }
  .tok-kw {
    color: #eee;
  }
  .tok-str {
    color: #2e5;
  }
  .tok-builtin {
    color: #ff894c;
  }
  .tok-comment {
    color: #aa7;
  }
  .tok-fn {
    color: #B1A0F8;
  }
  .tok-null {
    color: #ff8080;
  }
  .tok-number {
    color: #ff8080;
  }
  .tok-type {
    color: #68f;
  }
}
</style>
</head>
<body>
<pre><code><span class="line" id="L1"><span class="tok-kw">const</span> std = <span class="tok-builtin">@import</span>(<span class="tok-str">"std"</span>);</span>
<span class="line" id="L2"></span>
<span class="line" id="L3"><span class="tok-kw">pub</span> <span class="tok-kw">fn</span> <span class="tok-fn">lbinomial</span>(n: <span class="tok-type">f64</span>, k: <span class="tok-type">f64</span>) <span class="tok-type">f64</span> {</span>
<span class="line" id="L4">    <span class="tok-kw">const</span> num = std.math.lgamma(<span class="tok-type">f64</span>, n + <span class="tok-number">1</span>);</span>
<span class="line" id="L5">    <span class="tok-kw">const</span> den1 = std.math.lgamma(<span class="tok-type">f64</span>, k + <span class="tok-number">1</span>);</span>
<span class="line" id="L6">    <span class="tok-kw">const</span> den2 = std.math.lgamma(<span class="tok-type">f64</span>, n - k + <span class="tok-number">1</span>);</span>
<span class="line" id="L7">    <span class="tok-kw">return</span> num - den1 - den2;</span>
<span class="line" id="L8">}</span>
<span class="line" id="L9"></span>
<span class="line" id="L10"><span class="tok-kw">pub</span> <span class="tok-kw">fn</span> <span class="tok-fn">binomial</span>(n: <span class="tok-type">f64</span>, k: <span class="tok-type">f64</span>) <span class="tok-type">f64</span> {</span>
<span class="line" id="L11">    <span class="tok-kw">return</span> <span class="tok-builtin">@exp</span>(lbinomial(n, k));</span>
<span class="line" id="L12">}</span>
<span class="line" id="L13"></span>
<span class="line" id="L14"><span class="tok-kw">pub</span> <span class="tok-kw">fn</span> <span class="tok-fn">lbeta</span>(a: <span class="tok-type">f64</span>, b: <span class="tok-type">f64</span>) <span class="tok-type">f64</span> {</span>
<span class="line" id="L15">    <span class="tok-kw">const</span> num1 = std.math.lgamma(<span class="tok-type">f64</span>, a);</span>
<span class="line" id="L16">    <span class="tok-kw">const</span> num2 = std.math.lgamma(<span class="tok-type">f64</span>, b);</span>
<span class="line" id="L17">    <span class="tok-kw">const</span> den = std.math.lgamma(<span class="tok-type">f64</span>, a + b);</span>
<span class="line" id="L18">    <span class="tok-kw">return</span> num1 + num2 - den;</span>
<span class="line" id="L19">}</span>
<span class="line" id="L20"></span>
<span class="line" id="L21"><span class="tok-kw">pub</span> <span class="tok-kw">fn</span> <span class="tok-fn">beta</span>(a: <span class="tok-type">f64</span>, b: <span class="tok-type">f64</span>) <span class="tok-type">f64</span> {</span>
<span class="line" id="L22">    <span class="tok-kw">return</span> <span class="tok-builtin">@exp</span>(lbeta(a, b));</span>
<span class="line" id="L23">}</span>
<span class="line" id="L24"></span>
</code></pre></body>
</html>
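
For reference, the file added above defines log-domain combinatorial helpers: lbinomial(n, k) computes ln C(n, k) as lgamma(n + 1) - lgamma(k + 1) - lgamma(n - k + 1), and lbeta(a, b) computes ln B(a, b) = ln Gamma(a) + ln Gamma(b) - ln Gamma(a + b); binomial and beta simply exponentiate those results. Working in log space avoids overflowing the gamma function for large arguments. A minimal usage sketch follows; it is not part of the commit and assumes the file is importable as "math.zig" from a sibling test file.

const std = @import("std");
const math = @import("math.zig"); // path is an assumption for illustration

test "binomial and beta match closed forms" {
    // C(5, 2) = 10
    try std.testing.expectApproxEqRel(@as(f64, 10), math.binomial(5, 2), 1e-12);
    // B(2, 3) = Gamma(2)*Gamma(3)/Gamma(5) = 1*2/24 = 1/12
    try std.testing.expectApproxEqRel(@as(f64, 1.0 / 12.0), math.beta(2, 3), 1e-12);
    // the log variants stay consistent with their exponentiated counterparts
    try std.testing.expectApproxEqRel(math.beta(2, 3), @exp(math.lbeta(2, 3)), 1e-12);
}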