This check is the most popular. It validates the completeness attribute of a data set. It confirms that all fields contain values different from null.
from pyspark.sql import SparkSession
from cuallee import Check, CheckLevel

spark = SparkSession.builder.getOrCreate()
df = spark.range(10)  # 10 rows, no nulls in column "id"
check = Check(CheckLevel.WARNING, "CompletePredicate")
check.is_complete("id")

# Validate
check.validate(spark, df).show(truncate=False)
Result:
+---+-------------------+-----------------+-------+------+-----------+-----+----+----------+---------+--------------+--------+------+
|id |timestamp          |check            |level  |column|rule       |value|rows|violations|pass_rate|pass_threshold|metadata|status|
+---+-------------------+-----------------+-------+------+-----------+-----+----+----------+---------+--------------+--------+------+
|1  |2022-10-09 23:45:10|CompletePredicate|WARNING|id    |is_complete|N/A  |10  |0         |1.0      |1.0           |{}      |PASS  |
+---+-------------------+-----------------+-------+------+-----------+-----+----+----------+---------+--------------+--------+------+
from cuallee import Check, CheckLevel

# Reuses the SparkSession created above
df = spark.range(10)
check = Check(CheckLevel.WARNING, "IsComplete")
check.is_complete("id", .5)  # 0.5 = pass threshold: at least 50% of the values must be non-null

# Validate
check.validate(spark, df).show(truncate=False)
Result:
+---+-------------------+-----------------+-------+------+-----------+-----+----+----------+---------+--------------+--------+------+
|id |timestamp          |check            |level  |column|rule       |value|rows|violations|pass_rate|pass_threshold|metadata|status|
+---+-------------------+-----------------+-------+------+-----------+-----+----+----------+---------+--------------+--------+------+
|1  |2022-10-09 23:45:10|IsComplete       |WARNING|id    |is_complete|N/A  |10  |0         |1.0      |0.5           |{}      |PASS  |
+---+-------------------+-----------------+-------+------+-----------+-----+----+----------+---------+--------------+--------+------+
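Because spark.range(10) has no nulls, the pass_rate stays at 1.0 and the 0.5 threshold never actually comes into play. The snippet below is a hypothetical variation (not from the original examples) that nulls out 2 of the 10 values, so only 80% of the column is complete; with the same spark session and validate(spark, df) call used above, that 0.8 pass_rate should still clear the 0.5 pass_threshold and report PASS.

from pyspark.sql import functions as F
from cuallee import Check, CheckLevel

# Hypothetical data: values 8 and 9 become null, leaving 8 of 10 ids complete
df = spark.range(10).withColumn("id", F.when(F.col("id") < 8, F.col("id")))
check = Check(CheckLevel.WARNING, "IsCompleteWithNulls")
check.is_complete("id", .5)  # 0.8 completeness clears the 0.5 threshold

# Validate
check.validate(spark, df).show(truncate=False)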
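Since validate returns a regular Spark DataFrame (as the result tables above show), the outcome can also be consumed programmatically instead of only printed. A minimal sketch, assuming the check and df objects from the previous examples and that violated rules carry a FAIL status:

# Keep only the rules that did not pass
results = check.validate(spark, df)
failures = results.filter("status = 'FAIL'")
failures.show(truncate=False)

# Abort a pipeline step when any rule is violated
if failures.count() > 0:
    raise ValueError("cuallee data quality check failed")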
{"use strict";/*!
+ * escape-html
+ * Copyright(c) 2012-2013 TJ Holowaychuk
+ * Copyright(c) 2015 Andreas Lubbe
+ * Copyright(c) 2015 Tiancheng "Timothy" Gu
+ * MIT Licensed
+ */var Va=/["'&<>]/;qn.exports=za;function za(e){var t=""+e,r=Va.exec(t);if(!r)return t;var o,n="",i=0,s=0;for(i=r.index;i